author    Russ Cox <rsc@golang.org>  2020-12-23 00:41:49 -0500
committer Russ Cox <rsc@golang.org>  2020-12-23 06:38:26 +0000
commit    b9693d7627089204e6c2448f543c3512d86dae70 (patch)
tree      0f869f5abb58568525f47d330e93ef36e1467bf2 /src/cmd/compile/internal/typecheck
parent    dac0de3748cc816352da56f516506f80c33db4a5 (diff)
[dev.regabi] cmd/compile: split out package typecheck [generated]
This commit splits the typechecking logic into its own package,
the first of a sequence of CLs to break package gc into more
manageable units.

[git-generate]

cd src/cmd/compile/internal/gc
rf '
    # The binary import/export has to be part of typechecking,
    # because we load inlined function bodies lazily, but "exporter"
    # should not be. Move that out of bexport.go.
    mv exporter exporter.markObject exporter.markType export.go

    # Use the typechecking helpers, so that the calls left behind
    # in package gc do not need access to ctxExpr etc.
    ex {
        import "cmd/compile/internal/ir"

        # TODO(rsc): Should not be necessary.
        avoid TypecheckExpr
        avoid TypecheckStmt
        avoid TypecheckExprs
        avoid TypecheckStmts
        avoid TypecheckAssignExpr
        avoid TypecheckCallee

        var n ir.Node
        var ns []ir.Node
        typecheck(n, ctxExpr) -> TypecheckExpr(n)
        typecheck(n, ctxStmt) -> TypecheckStmt(n)
        typecheckslice(ns, ctxExpr) -> TypecheckExprs(ns)
        typecheckslice(ns, ctxStmt) -> TypecheckStmts(ns)
        typecheck(n, ctxExpr|ctxAssign) -> TypecheckAssignExpr(n)
        typecheck(n, ctxExpr|ctxCallee) -> TypecheckCallee(n)
    }

    # Move some typechecking API to typecheck.
    mv syslook LookupRuntime
    mv substArgTypes SubstArgTypes
    mv LookupRuntime SubstArgTypes syms.go

    mv conv Conv
    mv convnop ConvNop
    mv Conv ConvNop typecheck.go

    mv colasdefn AssignDefn
    mv colasname assignableName

    mv Target target.go

    mv initname autoexport exportsym dcl.go
    mv exportsym Export

    # Export API to be called from outside typecheck.
    # The ones with "Typecheck" prefixes will be renamed later to drop the prefix.
    mv adddot AddImplicitDots
    mv assignconv AssignConv
    mv expandmeth CalcMethods
    mv capturevarscomplete CaptureVarsComplete
    mv checkMapKeys CheckMapKeys
    mv checkreturn CheckReturn
    mv dclcontext DeclContext
    mv dclfunc DeclFunc
    mv declare Declare
    mv dotImportRefs DotImportRefs
    mv declImporter DeclImporter
    mv variter DeclVars
    mv defaultlit DefaultLit
    mv evalConst EvalConst
    mv expandInline ImportBody
    mv finishUniverse declareUniverse
    mv funcbody FinishFuncBody
    mv funchdr StartFuncBody
    mv indexconst IndexConst
    mv initTodo InitTodoFunc
    mv lookup Lookup
    mv resolve Resolve
    mv lookupN LookupNum
    mv nodAddr NodAddr
    mv nodAddrAt NodAddrAt
    mv nodnil NodNil
    mv origBoolConst OrigBool
    mv origConst OrigConst
    mv origIntConst OrigInt
    mv redeclare Redeclared
    mv tostruct NewStructType
    mv functype NewFuncType
    mv methodfunc NewMethodType
    mv structargs NewFuncParams
    mv temp Temp
    mv tempAt TempAt
    mv typecheckok TypecheckAllowed
    mv typecheck _typecheck # make room for typecheck pkg
    mv typecheckinl TypecheckImportedBody
    mv typecheckFunc TypecheckFunc
    mv iimport ReadImports
    mv iexport WriteExports
    mv sysfunc LookupRuntimeFunc
    mv sysvar LookupRuntimeVar

    # Move function constructors to typecheck.
    mv mkdotargslice MakeDotArgs
    mv fixVariadicCall FixVariadicCall
    mv closureType ClosureType
    mv partialCallType PartialCallType
    mv capturevars CaptureVars
    mv MakeDotArgs FixVariadicCall ClosureType PartialCallType CaptureVars typecheckclosure func.go

    mv autolabel AutoLabel
    mv AutoLabel syms.go

    mv Dlist dlist
    mv Symlink symlink
    mv \
        AssignDefn assignableName \
        AssignConv \
        CaptureVarsComplete \
        DeclContext \
        DeclFunc \
        DeclImporter \
        DeclVars \
        Declare \
        DotImportRefs \
        Export \
        InitTodoFunc \
        Lookup \
        LookupNum \
        LookupRuntimeFunc \
        LookupRuntimeVar \
        NewFuncParams \
        NewName \
        NodAddr \
        NodAddrAt \
        NodNil \
        Redeclared \
        StartFuncBody \
        FinishFuncBody \
        TypecheckImportedBody \
        AddImplicitDots \
        CalcMethods \
        CheckFuncStack \
        NewFuncType \
        NewMethodType \
        NewStructType \
        TypecheckAllowed \
        Temp \
        TempAt \
        adddot1 \
        dotlist \
        addmethod \
        assignconvfn \
        assignop \
        autotmpname \
        autoexport \
        bexport.go \
        checkdupfields \
        checkembeddedtype \
        closurename \
        convertop \
        declare_typegen \
        decldepth \
        dlist \
        dotpath \
        expand0 \
        expand1 \
        expandDecl \
        fakeRecvField \
        fnpkg \
        funcStack \
        funcStackEnt \
        funcarg \
        funcarg2 \
        funcargs \
        funcargs2 \
        globClosgen \
        ifacelookdot \
        implements \
        importalias \
        importconst \
        importfunc \
        importobj \
        importsym \
        importtype \
        importvar \
        inimport \
        initname \
        isptrto \
        loadsys \
        lookdot0 \
        lookdot1 \
        makepartialcall \
        okfor \
        okforlen \
        operandType \
        slist \
        symlink \
        tointerface \
        typeSet \
        typeSet.add \
        typeSetEntry \
        typecheckExprSwitch \
        typecheckTypeSwitch \
        typecheckpartialcall \
        typecheckrange \
        typecheckrangeExpr \
        typecheckselect \
        typecheckswitch \
        vargen \
        builtin.go \
        builtin_test.go \
        const.go \
        func.go \
        iexport.go \
        iimport.go \
        mapfile_mmap.go \
        syms.go \
        target.go \
        typecheck.go \
        unsafe.go \
        universe.go \
        cmd/compile/internal/typecheck
'

rm gen.go types.go types_acc.go
sed -i '' 's/package gc/package typecheck/' mapfile_read.go mkbuiltin.go
mv mapfile_read.go ../typecheck # not part of default build
mv mkbuiltin.go ../typecheck # package main helper
mv builtin ../typecheck

cd ../typecheck
mv dcl.go dcl1.go
mv typecheck.go typecheck1.go
mv universe.go universe1.go

rf '
    # Sweep some small files into larger ones.
    # "mv sym... file1.go file.go" (after the mv file1.go file.go above)
    # lets us insert sym... at the top of file.go.
    mv okfor okforeq universe1.go universe.go
    mv DeclContext vargen dcl1.go Temp TempAt autotmpname NewMethodType dcl.go
    mv InitTodoFunc inimport decldepth TypecheckAllowed typecheck1.go typecheck.go
    mv inl.go closure.go func.go
    mv range.go select.go swt.go stmt.go
    mv Lookup loadsys LookupRuntimeFunc LookupRuntimeVar syms.go
    mv unsafe.go const.go

    mv TypecheckAssignExpr AssignExpr
    mv TypecheckExpr Expr
    mv TypecheckStmt Stmt
    mv TypecheckExprs Exprs
    mv TypecheckStmts Stmts
    mv TypecheckCall Call
    mv TypecheckCallee Callee
    mv _typecheck check
    mv TypecheckFunc Func
    mv TypecheckFuncBody FuncBody
    mv TypecheckImports AllImportedBodies
    mv TypecheckImportedBody ImportedBody
    mv TypecheckInit Init
    mv TypecheckPackage Package
'

rm gen.go go.go init.go main.go reflect.go

Change-Id: Iea6a7aaf6407d690670ec58aeb36cc0b280f80b0
Reviewed-on: https://go-review.googlesource.com/c/go/+/279236
Trust: Russ Cox <rsc@golang.org>
Run-TryBot: Russ Cox <rsc@golang.org>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
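For illustration (a sketch, not text from the commit): the ex block above mechanically rewrites the call sites that stay behind in package gc, using placeholder nodes n and ns, roughly like this:

    // Before: the old helper takes context flags.
    n = typecheck(n, ctxExpr)
    typecheckslice(ns, ctxStmt)

    // After: dedicated helpers exported from the new package
    // (the second rf script later shortens these to typecheck.Expr
    // and typecheck.Stmts).
    n = TypecheckExpr(n)
    TypecheckStmts(ns)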
Diffstat (limited to 'src/cmd/compile/internal/typecheck')
-rw-r--r--  src/cmd/compile/internal/typecheck/bexport.go  102
-rw-r--r--  src/cmd/compile/internal/typecheck/builtin.go  344
-rw-r--r--  src/cmd/compile/internal/typecheck/builtin/runtime.go  259
-rw-r--r--  src/cmd/compile/internal/typecheck/builtin_test.go  33
-rw-r--r--  src/cmd/compile/internal/typecheck/const.go  944
-rw-r--r--  src/cmd/compile/internal/typecheck/dcl.go  705
-rw-r--r--  src/cmd/compile/internal/typecheck/export.go  79
-rw-r--r--  src/cmd/compile/internal/typecheck/func.go  398
-rw-r--r--  src/cmd/compile/internal/typecheck/iexport.go  1614
-rw-r--r--  src/cmd/compile/internal/typecheck/iimport.go  1142
-rw-r--r--  src/cmd/compile/internal/typecheck/mapfile_mmap.go  48
-rw-r--r--  src/cmd/compile/internal/typecheck/mapfile_read.go  21
-rw-r--r--  src/cmd/compile/internal/typecheck/mkbuiltin.go  228
-rw-r--r--  src/cmd/compile/internal/typecheck/stmt.go  435
-rw-r--r--  src/cmd/compile/internal/typecheck/subr.go  793
-rw-r--r--  src/cmd/compile/internal/typecheck/syms.go  104
-rw-r--r--  src/cmd/compile/internal/typecheck/target.go  12
-rw-r--r--  src/cmd/compile/internal/typecheck/typecheck.go  4180
-rw-r--r--  src/cmd/compile/internal/typecheck/universe.go  362
19 files changed, 11803 insertions, 0 deletions
diff --git a/src/cmd/compile/internal/typecheck/bexport.go b/src/cmd/compile/internal/typecheck/bexport.go
new file mode 100644
index 0000000000..4a84bb13fa
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/bexport.go
@@ -0,0 +1,102 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typecheck
+
+import "cmd/compile/internal/types"
+
+// ----------------------------------------------------------------------------
+// Export format
+
+// Tags. Must be < 0.
+const (
+ // Objects
+ packageTag = -(iota + 1)
+ constTag
+ typeTag
+ varTag
+ funcTag
+ endTag
+
+ // Types
+ namedTag
+ arrayTag
+ sliceTag
+ dddTag
+ structTag
+ pointerTag
+ signatureTag
+ interfaceTag
+ mapTag
+ chanTag
+
+ // Values
+ falseTag
+ trueTag
+ int64Tag
+ floatTag
+ fractionTag // not used by gc
+ complexTag
+ stringTag
+ nilTag
+ unknownTag // not used by gc (only appears in packages with errors)
+
+ // Type aliases
+ aliasTag
+)
+
+var predecl []*types.Type // initialized lazily
+
+func predeclared() []*types.Type {
+ if predecl == nil {
+ // initialize lazily to be sure that all
+ // elements have been initialized before use
+ predecl = []*types.Type{
+ // basic types
+ types.Types[types.TBOOL],
+ types.Types[types.TINT],
+ types.Types[types.TINT8],
+ types.Types[types.TINT16],
+ types.Types[types.TINT32],
+ types.Types[types.TINT64],
+ types.Types[types.TUINT],
+ types.Types[types.TUINT8],
+ types.Types[types.TUINT16],
+ types.Types[types.TUINT32],
+ types.Types[types.TUINT64],
+ types.Types[types.TUINTPTR],
+ types.Types[types.TFLOAT32],
+ types.Types[types.TFLOAT64],
+ types.Types[types.TCOMPLEX64],
+ types.Types[types.TCOMPLEX128],
+ types.Types[types.TSTRING],
+
+ // basic type aliases
+ types.ByteType,
+ types.RuneType,
+
+ // error
+ types.ErrorType,
+
+ // untyped types
+ types.UntypedBool,
+ types.UntypedInt,
+ types.UntypedRune,
+ types.UntypedFloat,
+ types.UntypedComplex,
+ types.UntypedString,
+ types.Types[types.TNIL],
+
+ // package unsafe
+ types.Types[types.TUNSAFEPTR],
+
+ // invalid type (package contains errors)
+ types.Types[types.Txxx],
+
+ // any type, for builtin export data
+ types.Types[types.TANY],
+ }
+ }
+ return predecl
+}
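predeclared builds its list on first use rather than at package init, so it runs only after the types package has populated the values it references. A minimal standalone sketch of the same lazy-initialization pattern (hypothetical names, not compiler code; like predecl, it assumes single-threaded use):

    package main

    import "fmt"

    // table is initialized lazily, mirroring predecl above.
    var table []string

    func predeclaredNames() []string {
        if table == nil {
            // First call: build the slice now that all package-level
            // state it depends on has been initialized.
            table = []string{"bool", "int", "string"}
        }
        return table
    }

    func main() {
        fmt.Println(predeclaredNames()) // [bool int string]
    }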
diff --git a/src/cmd/compile/internal/typecheck/builtin.go b/src/cmd/compile/internal/typecheck/builtin.go
new file mode 100644
index 0000000000..d3c30fbf50
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/builtin.go
@@ -0,0 +1,344 @@
+// Code generated by mkbuiltin.go. DO NOT EDIT.
+
+package typecheck
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+)
+
+var runtimeDecls = [...]struct {
+ name string
+ tag int
+ typ int
+}{
+ {"newobject", funcTag, 4},
+ {"mallocgc", funcTag, 8},
+ {"panicdivide", funcTag, 9},
+ {"panicshift", funcTag, 9},
+ {"panicmakeslicelen", funcTag, 9},
+ {"panicmakeslicecap", funcTag, 9},
+ {"throwinit", funcTag, 9},
+ {"panicwrap", funcTag, 9},
+ {"gopanic", funcTag, 11},
+ {"gorecover", funcTag, 14},
+ {"goschedguarded", funcTag, 9},
+ {"goPanicIndex", funcTag, 16},
+ {"goPanicIndexU", funcTag, 18},
+ {"goPanicSliceAlen", funcTag, 16},
+ {"goPanicSliceAlenU", funcTag, 18},
+ {"goPanicSliceAcap", funcTag, 16},
+ {"goPanicSliceAcapU", funcTag, 18},
+ {"goPanicSliceB", funcTag, 16},
+ {"goPanicSliceBU", funcTag, 18},
+ {"goPanicSlice3Alen", funcTag, 16},
+ {"goPanicSlice3AlenU", funcTag, 18},
+ {"goPanicSlice3Acap", funcTag, 16},
+ {"goPanicSlice3AcapU", funcTag, 18},
+ {"goPanicSlice3B", funcTag, 16},
+ {"goPanicSlice3BU", funcTag, 18},
+ {"goPanicSlice3C", funcTag, 16},
+ {"goPanicSlice3CU", funcTag, 18},
+ {"printbool", funcTag, 19},
+ {"printfloat", funcTag, 21},
+ {"printint", funcTag, 23},
+ {"printhex", funcTag, 25},
+ {"printuint", funcTag, 25},
+ {"printcomplex", funcTag, 27},
+ {"printstring", funcTag, 29},
+ {"printpointer", funcTag, 30},
+ {"printuintptr", funcTag, 31},
+ {"printiface", funcTag, 30},
+ {"printeface", funcTag, 30},
+ {"printslice", funcTag, 30},
+ {"printnl", funcTag, 9},
+ {"printsp", funcTag, 9},
+ {"printlock", funcTag, 9},
+ {"printunlock", funcTag, 9},
+ {"concatstring2", funcTag, 34},
+ {"concatstring3", funcTag, 35},
+ {"concatstring4", funcTag, 36},
+ {"concatstring5", funcTag, 37},
+ {"concatstrings", funcTag, 39},
+ {"cmpstring", funcTag, 40},
+ {"intstring", funcTag, 43},
+ {"slicebytetostring", funcTag, 44},
+ {"slicebytetostringtmp", funcTag, 45},
+ {"slicerunetostring", funcTag, 48},
+ {"stringtoslicebyte", funcTag, 50},
+ {"stringtoslicerune", funcTag, 53},
+ {"slicecopy", funcTag, 54},
+ {"decoderune", funcTag, 55},
+ {"countrunes", funcTag, 56},
+ {"convI2I", funcTag, 57},
+ {"convT16", funcTag, 58},
+ {"convT32", funcTag, 58},
+ {"convT64", funcTag, 58},
+ {"convTstring", funcTag, 58},
+ {"convTslice", funcTag, 58},
+ {"convT2E", funcTag, 59},
+ {"convT2Enoptr", funcTag, 59},
+ {"convT2I", funcTag, 59},
+ {"convT2Inoptr", funcTag, 59},
+ {"assertE2I", funcTag, 57},
+ {"assertE2I2", funcTag, 60},
+ {"assertI2I", funcTag, 57},
+ {"assertI2I2", funcTag, 60},
+ {"panicdottypeE", funcTag, 61},
+ {"panicdottypeI", funcTag, 61},
+ {"panicnildottype", funcTag, 62},
+ {"ifaceeq", funcTag, 64},
+ {"efaceeq", funcTag, 64},
+ {"fastrand", funcTag, 66},
+ {"makemap64", funcTag, 68},
+ {"makemap", funcTag, 69},
+ {"makemap_small", funcTag, 70},
+ {"mapaccess1", funcTag, 71},
+ {"mapaccess1_fast32", funcTag, 72},
+ {"mapaccess1_fast64", funcTag, 72},
+ {"mapaccess1_faststr", funcTag, 72},
+ {"mapaccess1_fat", funcTag, 73},
+ {"mapaccess2", funcTag, 74},
+ {"mapaccess2_fast32", funcTag, 75},
+ {"mapaccess2_fast64", funcTag, 75},
+ {"mapaccess2_faststr", funcTag, 75},
+ {"mapaccess2_fat", funcTag, 76},
+ {"mapassign", funcTag, 71},
+ {"mapassign_fast32", funcTag, 72},
+ {"mapassign_fast32ptr", funcTag, 72},
+ {"mapassign_fast64", funcTag, 72},
+ {"mapassign_fast64ptr", funcTag, 72},
+ {"mapassign_faststr", funcTag, 72},
+ {"mapiterinit", funcTag, 77},
+ {"mapdelete", funcTag, 77},
+ {"mapdelete_fast32", funcTag, 78},
+ {"mapdelete_fast64", funcTag, 78},
+ {"mapdelete_faststr", funcTag, 78},
+ {"mapiternext", funcTag, 79},
+ {"mapclear", funcTag, 80},
+ {"makechan64", funcTag, 82},
+ {"makechan", funcTag, 83},
+ {"chanrecv1", funcTag, 85},
+ {"chanrecv2", funcTag, 86},
+ {"chansend1", funcTag, 88},
+ {"closechan", funcTag, 30},
+ {"writeBarrier", varTag, 90},
+ {"typedmemmove", funcTag, 91},
+ {"typedmemclr", funcTag, 92},
+ {"typedslicecopy", funcTag, 93},
+ {"selectnbsend", funcTag, 94},
+ {"selectnbrecv", funcTag, 95},
+ {"selectnbrecv2", funcTag, 97},
+ {"selectsetpc", funcTag, 98},
+ {"selectgo", funcTag, 99},
+ {"block", funcTag, 9},
+ {"makeslice", funcTag, 100},
+ {"makeslice64", funcTag, 101},
+ {"makeslicecopy", funcTag, 102},
+ {"growslice", funcTag, 104},
+ {"memmove", funcTag, 105},
+ {"memclrNoHeapPointers", funcTag, 106},
+ {"memclrHasPointers", funcTag, 106},
+ {"memequal", funcTag, 107},
+ {"memequal0", funcTag, 108},
+ {"memequal8", funcTag, 108},
+ {"memequal16", funcTag, 108},
+ {"memequal32", funcTag, 108},
+ {"memequal64", funcTag, 108},
+ {"memequal128", funcTag, 108},
+ {"f32equal", funcTag, 109},
+ {"f64equal", funcTag, 109},
+ {"c64equal", funcTag, 109},
+ {"c128equal", funcTag, 109},
+ {"strequal", funcTag, 109},
+ {"interequal", funcTag, 109},
+ {"nilinterequal", funcTag, 109},
+ {"memhash", funcTag, 110},
+ {"memhash0", funcTag, 111},
+ {"memhash8", funcTag, 111},
+ {"memhash16", funcTag, 111},
+ {"memhash32", funcTag, 111},
+ {"memhash64", funcTag, 111},
+ {"memhash128", funcTag, 111},
+ {"f32hash", funcTag, 111},
+ {"f64hash", funcTag, 111},
+ {"c64hash", funcTag, 111},
+ {"c128hash", funcTag, 111},
+ {"strhash", funcTag, 111},
+ {"interhash", funcTag, 111},
+ {"nilinterhash", funcTag, 111},
+ {"int64div", funcTag, 112},
+ {"uint64div", funcTag, 113},
+ {"int64mod", funcTag, 112},
+ {"uint64mod", funcTag, 113},
+ {"float64toint64", funcTag, 114},
+ {"float64touint64", funcTag, 115},
+ {"float64touint32", funcTag, 116},
+ {"int64tofloat64", funcTag, 117},
+ {"uint64tofloat64", funcTag, 118},
+ {"uint32tofloat64", funcTag, 119},
+ {"complex128div", funcTag, 120},
+ {"racefuncenter", funcTag, 31},
+ {"racefuncenterfp", funcTag, 9},
+ {"racefuncexit", funcTag, 9},
+ {"raceread", funcTag, 31},
+ {"racewrite", funcTag, 31},
+ {"racereadrange", funcTag, 121},
+ {"racewriterange", funcTag, 121},
+ {"msanread", funcTag, 121},
+ {"msanwrite", funcTag, 121},
+ {"msanmove", funcTag, 122},
+ {"checkptrAlignment", funcTag, 123},
+ {"checkptrArithmetic", funcTag, 125},
+ {"libfuzzerTraceCmp1", funcTag, 127},
+ {"libfuzzerTraceCmp2", funcTag, 129},
+ {"libfuzzerTraceCmp4", funcTag, 130},
+ {"libfuzzerTraceCmp8", funcTag, 131},
+ {"libfuzzerTraceConstCmp1", funcTag, 127},
+ {"libfuzzerTraceConstCmp2", funcTag, 129},
+ {"libfuzzerTraceConstCmp4", funcTag, 130},
+ {"libfuzzerTraceConstCmp8", funcTag, 131},
+ {"x86HasPOPCNT", varTag, 6},
+ {"x86HasSSE41", varTag, 6},
+ {"x86HasFMA", varTag, 6},
+ {"armHasVFPv4", varTag, 6},
+ {"arm64HasATOMICS", varTag, 6},
+}
+
+func runtimeTypes() []*types.Type {
+ var typs [132]*types.Type
+ typs[0] = types.ByteType
+ typs[1] = types.NewPtr(typs[0])
+ typs[2] = types.Types[types.TANY]
+ typs[3] = types.NewPtr(typs[2])
+ typs[4] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
+ typs[5] = types.Types[types.TUINTPTR]
+ typs[6] = types.Types[types.TBOOL]
+ typs[7] = types.Types[types.TUNSAFEPTR]
+ typs[8] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[6])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
+ typs[9] = NewFuncType(nil, nil, nil)
+ typs[10] = types.Types[types.TINTER]
+ typs[11] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[10])}, nil)
+ typs[12] = types.Types[types.TINT32]
+ typs[13] = types.NewPtr(typs[12])
+ typs[14] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[13])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[10])})
+ typs[15] = types.Types[types.TINT]
+ typs[16] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15])}, nil)
+ typs[17] = types.Types[types.TUINT]
+ typs[18] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[17]), ir.NewField(base.Pos, nil, nil, typs[15])}, nil)
+ typs[19] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])}, nil)
+ typs[20] = types.Types[types.TFLOAT64]
+ typs[21] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, nil)
+ typs[22] = types.Types[types.TINT64]
+ typs[23] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])}, nil)
+ typs[24] = types.Types[types.TUINT64]
+ typs[25] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])}, nil)
+ typs[26] = types.Types[types.TCOMPLEX128]
+ typs[27] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26])}, nil)
+ typs[28] = types.Types[types.TSTRING]
+ typs[29] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])}, nil)
+ typs[30] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])}, nil)
+ typs[31] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
+ typs[32] = types.NewArray(typs[0], 32)
+ typs[33] = types.NewPtr(typs[32])
+ typs[34] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
+ typs[35] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
+ typs[36] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
+ typs[37] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
+ typs[38] = types.NewSlice(typs[28])
+ typs[39] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[38])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
+ typs[40] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
+ typs[41] = types.NewArray(typs[0], 4)
+ typs[42] = types.NewPtr(typs[41])
+ typs[43] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[42]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
+ typs[44] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
+ typs[45] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
+ typs[46] = types.RuneType
+ typs[47] = types.NewSlice(typs[46])
+ typs[48] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[47])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
+ typs[49] = types.NewSlice(typs[0])
+ typs[50] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[49])})
+ typs[51] = types.NewArray(typs[46], 32)
+ typs[52] = types.NewPtr(typs[51])
+ typs[53] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[52]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[47])})
+ typs[54] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
+ typs[55] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[46]), ir.NewField(base.Pos, nil, nil, typs[15])})
+ typs[56] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
+ typs[57] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])})
+ typs[58] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
+ typs[59] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])})
+ typs[60] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2]), ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[61] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1])}, nil)
+ typs[62] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1])}, nil)
+ typs[63] = types.NewPtr(typs[5])
+ typs[64] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[63]), ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[65] = types.Types[types.TUINT32]
+ typs[66] = NewFuncType(nil, nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])})
+ typs[67] = types.NewMap(typs[2], typs[2])
+ typs[68] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
+ typs[69] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
+ typs[70] = NewFuncType(nil, nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
+ typs[71] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
+ typs[72] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
+ typs[73] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
+ typs[74] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[75] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[76] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[77] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
+ typs[78] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, nil)
+ typs[79] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
+ typs[80] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67])}, nil)
+ typs[81] = types.NewChan(typs[2], types.Cboth)
+ typs[82] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[81])})
+ typs[83] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[81])})
+ typs[84] = types.NewChan(typs[2], types.Crecv)
+ typs[85] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[84]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
+ typs[86] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[84]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[87] = types.NewChan(typs[2], types.Csend)
+ typs[88] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[87]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
+ typs[89] = types.NewArray(typs[0], 3)
+ typs[90] = NewStructType([]*ir.Field{ir.NewField(base.Pos, Lookup("enabled"), nil, typs[6]), ir.NewField(base.Pos, Lookup("pad"), nil, typs[89]), ir.NewField(base.Pos, Lookup("needed"), nil, typs[6]), ir.NewField(base.Pos, Lookup("cgo"), nil, typs[6]), ir.NewField(base.Pos, Lookup("alignme"), nil, typs[24])})
+ typs[91] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
+ typs[92] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
+ typs[93] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
+ typs[94] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[87]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[95] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[84])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[96] = types.NewPtr(typs[6])
+ typs[97] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[96]), ir.NewField(base.Pos, nil, nil, typs[84])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[98] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[63])}, nil)
+ typs[99] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[63]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[6])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[100] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
+ typs[101] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
+ typs[102] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
+ typs[103] = types.NewSlice(typs[2])
+ typs[104] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[103]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[103])})
+ typs[105] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
+ typs[106] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
+ typs[107] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[108] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[109] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
+ typs[110] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])})
+ typs[111] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])})
+ typs[112] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])})
+ typs[113] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24]), ir.NewField(base.Pos, nil, nil, typs[24])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])})
+ typs[114] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])})
+ typs[115] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])})
+ typs[116] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])})
+ typs[117] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
+ typs[118] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
+ typs[119] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
+ typs[120] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26]), ir.NewField(base.Pos, nil, nil, typs[26])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26])})
+ typs[121] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
+ typs[122] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
+ typs[123] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
+ typs[124] = types.NewSlice(typs[7])
+ typs[125] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[124])}, nil)
+ typs[126] = types.Types[types.TUINT8]
+ typs[127] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[126]), ir.NewField(base.Pos, nil, nil, typs[126])}, nil)
+ typs[128] = types.Types[types.TUINT16]
+ typs[129] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[128]), ir.NewField(base.Pos, nil, nil, typs[128])}, nil)
+ typs[130] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65]), ir.NewField(base.Pos, nil, nil, typs[65])}, nil)
+ typs[131] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24]), ir.NewField(base.Pos, nil, nil, typs[24])}, nil)
+ return typs[:]
+}
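Each runtimeDecls entry pairs a runtime symbol name with a tag (funcTag or varTag) and an index into the slice built by runtimeTypes, so identical signatures share one *types.Type: every no-arg, no-result function above points at typs[9], for example. A toy standalone sketch of that indexing scheme (hypothetical names, with strings standing in for *types.Type):

    package main

    import "fmt"

    // decl mirrors the shape of a runtimeDecls entry.
    type decl struct {
        name string
        typ  int // index into typs, so signatures are deduplicated
    }

    func main() {
        typs := []string{
            0: "func(*byte) *any",
            1: "func()",
        }
        decls := []decl{
            {"newobject", 0},
            {"printnl", 1},
            {"printsp", 1}, // same signature as printnl: one shared entry
        }
        for _, d := range decls {
            fmt.Printf("%-10s %s\n", d.name, typs[d.typ])
        }
    }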
diff --git a/src/cmd/compile/internal/typecheck/builtin/runtime.go b/src/cmd/compile/internal/typecheck/builtin/runtime.go
new file mode 100644
index 0000000000..acb69c7b28
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/builtin/runtime.go
@@ -0,0 +1,259 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// NOTE: If you change this file you must run "go generate"
+// to update builtin.go. This is not done automatically
+// to avoid depending on having a working compiler binary.
+
+// +build ignore
+
+package runtime
+
+// emitted by compiler, not referred to by go programs
+
+import "unsafe"
+
+func newobject(typ *byte) *any
+func mallocgc(size uintptr, typ *byte, needszero bool) unsafe.Pointer
+func panicdivide()
+func panicshift()
+func panicmakeslicelen()
+func panicmakeslicecap()
+func throwinit()
+func panicwrap()
+
+func gopanic(interface{})
+func gorecover(*int32) interface{}
+func goschedguarded()
+
+// Note: these declarations are just for wasm port.
+// Other ports call assembly stubs instead.
+func goPanicIndex(x int, y int)
+func goPanicIndexU(x uint, y int)
+func goPanicSliceAlen(x int, y int)
+func goPanicSliceAlenU(x uint, y int)
+func goPanicSliceAcap(x int, y int)
+func goPanicSliceAcapU(x uint, y int)
+func goPanicSliceB(x int, y int)
+func goPanicSliceBU(x uint, y int)
+func goPanicSlice3Alen(x int, y int)
+func goPanicSlice3AlenU(x uint, y int)
+func goPanicSlice3Acap(x int, y int)
+func goPanicSlice3AcapU(x uint, y int)
+func goPanicSlice3B(x int, y int)
+func goPanicSlice3BU(x uint, y int)
+func goPanicSlice3C(x int, y int)
+func goPanicSlice3CU(x uint, y int)
+
+func printbool(bool)
+func printfloat(float64)
+func printint(int64)
+func printhex(uint64)
+func printuint(uint64)
+func printcomplex(complex128)
+func printstring(string)
+func printpointer(any)
+func printuintptr(uintptr)
+func printiface(any)
+func printeface(any)
+func printslice(any)
+func printnl()
+func printsp()
+func printlock()
+func printunlock()
+
+func concatstring2(*[32]byte, string, string) string
+func concatstring3(*[32]byte, string, string, string) string
+func concatstring4(*[32]byte, string, string, string, string) string
+func concatstring5(*[32]byte, string, string, string, string, string) string
+func concatstrings(*[32]byte, []string) string
+
+func cmpstring(string, string) int
+func intstring(*[4]byte, int64) string
+func slicebytetostring(buf *[32]byte, ptr *byte, n int) string
+func slicebytetostringtmp(ptr *byte, n int) string
+func slicerunetostring(*[32]byte, []rune) string
+func stringtoslicebyte(*[32]byte, string) []byte
+func stringtoslicerune(*[32]rune, string) []rune
+func slicecopy(toPtr *any, toLen int, fromPtr *any, fromLen int, wid uintptr) int
+
+func decoderune(string, int) (retv rune, retk int)
+func countrunes(string) int
+
+// Non-empty-interface to non-empty-interface conversion.
+func convI2I(typ *byte, elem any) (ret any)
+
+// Specialized type-to-interface conversion.
+// These return only a data pointer.
+func convT16(val any) unsafe.Pointer // val must be uint16-like (same size and alignment as a uint16)
+func convT32(val any) unsafe.Pointer // val must be uint32-like (same size and alignment as a uint32)
+func convT64(val any) unsafe.Pointer // val must be uint64-like (same size and alignment as a uint64 and contains no pointers)
+func convTstring(val any) unsafe.Pointer // val must be a string
+func convTslice(val any) unsafe.Pointer // val must be a slice
+
+// Type to empty-interface conversion.
+func convT2E(typ *byte, elem *any) (ret any)
+func convT2Enoptr(typ *byte, elem *any) (ret any)
+
+// Type to non-empty-interface conversion.
+func convT2I(tab *byte, elem *any) (ret any)
+func convT2Inoptr(tab *byte, elem *any) (ret any)
+
+// interface type assertions x.(T)
+func assertE2I(typ *byte, iface any) (ret any)
+func assertE2I2(typ *byte, iface any) (ret any, b bool)
+func assertI2I(typ *byte, iface any) (ret any)
+func assertI2I2(typ *byte, iface any) (ret any, b bool)
+func panicdottypeE(have, want, iface *byte)
+func panicdottypeI(have, want, iface *byte)
+func panicnildottype(want *byte)
+
+// interface equality. Type/itab pointers are already known to be equal, so
+// we only need to pass one.
+func ifaceeq(tab *uintptr, x, y unsafe.Pointer) (ret bool)
+func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool)
+
+func fastrand() uint32
+
+// *byte is really *runtime.Type
+func makemap64(mapType *byte, hint int64, mapbuf *any) (hmap map[any]any)
+func makemap(mapType *byte, hint int, mapbuf *any) (hmap map[any]any)
+func makemap_small() (hmap map[any]any)
+func mapaccess1(mapType *byte, hmap map[any]any, key *any) (val *any)
+func mapaccess1_fast32(mapType *byte, hmap map[any]any, key any) (val *any)
+func mapaccess1_fast64(mapType *byte, hmap map[any]any, key any) (val *any)
+func mapaccess1_faststr(mapType *byte, hmap map[any]any, key any) (val *any)
+func mapaccess1_fat(mapType *byte, hmap map[any]any, key *any, zero *byte) (val *any)
+func mapaccess2(mapType *byte, hmap map[any]any, key *any) (val *any, pres bool)
+func mapaccess2_fast32(mapType *byte, hmap map[any]any, key any) (val *any, pres bool)
+func mapaccess2_fast64(mapType *byte, hmap map[any]any, key any) (val *any, pres bool)
+func mapaccess2_faststr(mapType *byte, hmap map[any]any, key any) (val *any, pres bool)
+func mapaccess2_fat(mapType *byte, hmap map[any]any, key *any, zero *byte) (val *any, pres bool)
+func mapassign(mapType *byte, hmap map[any]any, key *any) (val *any)
+func mapassign_fast32(mapType *byte, hmap map[any]any, key any) (val *any)
+func mapassign_fast32ptr(mapType *byte, hmap map[any]any, key any) (val *any)
+func mapassign_fast64(mapType *byte, hmap map[any]any, key any) (val *any)
+func mapassign_fast64ptr(mapType *byte, hmap map[any]any, key any) (val *any)
+func mapassign_faststr(mapType *byte, hmap map[any]any, key any) (val *any)
+func mapiterinit(mapType *byte, hmap map[any]any, hiter *any)
+func mapdelete(mapType *byte, hmap map[any]any, key *any)
+func mapdelete_fast32(mapType *byte, hmap map[any]any, key any)
+func mapdelete_fast64(mapType *byte, hmap map[any]any, key any)
+func mapdelete_faststr(mapType *byte, hmap map[any]any, key any)
+func mapiternext(hiter *any)
+func mapclear(mapType *byte, hmap map[any]any)
+
+// *byte is really *runtime.Type
+func makechan64(chanType *byte, size int64) (hchan chan any)
+func makechan(chanType *byte, size int) (hchan chan any)
+func chanrecv1(hchan <-chan any, elem *any)
+func chanrecv2(hchan <-chan any, elem *any) bool
+func chansend1(hchan chan<- any, elem *any)
+func closechan(hchan any)
+
+var writeBarrier struct {
+ enabled bool
+ pad [3]byte
+ needed bool
+ cgo bool
+ alignme uint64
+}
+
+// *byte is really *runtime.Type
+func typedmemmove(typ *byte, dst *any, src *any)
+func typedmemclr(typ *byte, dst *any)
+func typedslicecopy(typ *byte, dstPtr *any, dstLen int, srcPtr *any, srcLen int) int
+
+func selectnbsend(hchan chan<- any, elem *any) bool
+func selectnbrecv(elem *any, hchan <-chan any) bool
+func selectnbrecv2(elem *any, received *bool, hchan <-chan any) bool
+
+func selectsetpc(pc *uintptr)
+func selectgo(cas0 *byte, order0 *byte, pc0 *uintptr, nsends int, nrecvs int, block bool) (int, bool)
+func block()
+
+func makeslice(typ *byte, len int, cap int) unsafe.Pointer
+func makeslice64(typ *byte, len int64, cap int64) unsafe.Pointer
+func makeslicecopy(typ *byte, tolen int, fromlen int, from unsafe.Pointer) unsafe.Pointer
+func growslice(typ *byte, old []any, cap int) (ary []any)
+func memmove(to *any, frm *any, length uintptr)
+func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
+func memclrHasPointers(ptr unsafe.Pointer, n uintptr)
+
+func memequal(x, y *any, size uintptr) bool
+func memequal0(x, y *any) bool
+func memequal8(x, y *any) bool
+func memequal16(x, y *any) bool
+func memequal32(x, y *any) bool
+func memequal64(x, y *any) bool
+func memequal128(x, y *any) bool
+func f32equal(p, q unsafe.Pointer) bool
+func f64equal(p, q unsafe.Pointer) bool
+func c64equal(p, q unsafe.Pointer) bool
+func c128equal(p, q unsafe.Pointer) bool
+func strequal(p, q unsafe.Pointer) bool
+func interequal(p, q unsafe.Pointer) bool
+func nilinterequal(p, q unsafe.Pointer) bool
+
+func memhash(p unsafe.Pointer, h uintptr, size uintptr) uintptr
+func memhash0(p unsafe.Pointer, h uintptr) uintptr
+func memhash8(p unsafe.Pointer, h uintptr) uintptr
+func memhash16(p unsafe.Pointer, h uintptr) uintptr
+func memhash32(p unsafe.Pointer, h uintptr) uintptr
+func memhash64(p unsafe.Pointer, h uintptr) uintptr
+func memhash128(p unsafe.Pointer, h uintptr) uintptr
+func f32hash(p unsafe.Pointer, h uintptr) uintptr
+func f64hash(p unsafe.Pointer, h uintptr) uintptr
+func c64hash(p unsafe.Pointer, h uintptr) uintptr
+func c128hash(p unsafe.Pointer, h uintptr) uintptr
+func strhash(a unsafe.Pointer, h uintptr) uintptr
+func interhash(p unsafe.Pointer, h uintptr) uintptr
+func nilinterhash(p unsafe.Pointer, h uintptr) uintptr
+
+// only used on 32-bit
+func int64div(int64, int64) int64
+func uint64div(uint64, uint64) uint64
+func int64mod(int64, int64) int64
+func uint64mod(uint64, uint64) uint64
+func float64toint64(float64) int64
+func float64touint64(float64) uint64
+func float64touint32(float64) uint32
+func int64tofloat64(int64) float64
+func uint64tofloat64(uint64) float64
+func uint32tofloat64(uint32) float64
+
+func complex128div(num complex128, den complex128) (quo complex128)
+
+// race detection
+func racefuncenter(uintptr)
+func racefuncenterfp()
+func racefuncexit()
+func raceread(uintptr)
+func racewrite(uintptr)
+func racereadrange(addr, size uintptr)
+func racewriterange(addr, size uintptr)
+
+// memory sanitizer
+func msanread(addr, size uintptr)
+func msanwrite(addr, size uintptr)
+func msanmove(dst, src, size uintptr)
+
+func checkptrAlignment(unsafe.Pointer, *byte, uintptr)
+func checkptrArithmetic(unsafe.Pointer, []unsafe.Pointer)
+
+func libfuzzerTraceCmp1(uint8, uint8)
+func libfuzzerTraceCmp2(uint16, uint16)
+func libfuzzerTraceCmp4(uint32, uint32)
+func libfuzzerTraceCmp8(uint64, uint64)
+func libfuzzerTraceConstCmp1(uint8, uint8)
+func libfuzzerTraceConstCmp2(uint16, uint16)
+func libfuzzerTraceConstCmp4(uint32, uint32)
+func libfuzzerTraceConstCmp8(uint64, uint64)
+
+// architecture variants
+var x86HasPOPCNT bool
+var x86HasSSE41 bool
+var x86HasFMA bool
+var armHasVFPv4 bool
+var arm64HasATOMICS bool
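Reading this file against the generated builtin.go above shows the correspondence that mkbuiltin.go produces (lines quoted from the two files; any is the placeholder for "an arbitrary type", i.e. types.TANY):

    // builtin/runtime.go declares the signature:
    func mallocgc(size uintptr, typ *byte, needszero bool) unsafe.Pointer

    // mkbuiltin.go turns it into the table entry in builtin.go:
    {"mallocgc", funcTag, 8}
    // where typs[8] is the NewFuncType(...) value built from
    // (uintptr, *byte, bool) -> unsafe.Pointer, shown above.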
diff --git a/src/cmd/compile/internal/typecheck/builtin_test.go b/src/cmd/compile/internal/typecheck/builtin_test.go
new file mode 100644
index 0000000000..cc8d49730a
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/builtin_test.go
@@ -0,0 +1,33 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typecheck
+
+import (
+ "bytes"
+ "internal/testenv"
+ "io/ioutil"
+ "os/exec"
+ "testing"
+)
+
+func TestBuiltin(t *testing.T) {
+ t.Skip("mkbuiltin needs fixing")
+ testenv.MustHaveGoRun(t)
+ t.Parallel()
+
+ old, err := ioutil.ReadFile("builtin.go")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ new, err := exec.Command(testenv.GoToolPath(t), "run", "mkbuiltin.go", "-stdout").Output()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if !bytes.Equal(old, new) {
+ t.Fatal("builtin.go out of date; run mkbuiltin.go")
+ }
+}
diff --git a/src/cmd/compile/internal/typecheck/const.go b/src/cmd/compile/internal/typecheck/const.go
new file mode 100644
index 0000000000..54d70cb835
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/const.go
@@ -0,0 +1,944 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typecheck
+
+import (
+ "fmt"
+ "go/constant"
+ "go/token"
+ "math"
+ "math/big"
+ "strings"
+ "unicode"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+ "cmd/internal/src"
+)
+
+func roundFloat(v constant.Value, sz int64) constant.Value {
+ switch sz {
+ case 4:
+ f, _ := constant.Float32Val(v)
+ return makeFloat64(float64(f))
+ case 8:
+ f, _ := constant.Float64Val(v)
+ return makeFloat64(f)
+ }
+ base.Fatalf("unexpected size: %v", sz)
+ panic("unreachable")
+}
+
+// truncfltlit truncates the float literal v to 32-bit or 64-bit precision
+// according to type t; it returns the truncated value.
+func truncfltlit(v constant.Value, t *types.Type) constant.Value {
+ if t.IsUntyped() || overflow(v, t) {
+ // If there was overflow, simply continuing would set the
+ // value to Inf which in turn would lead to spurious follow-on
+ // errors. Avoid this by returning the existing value.
+ return v
+ }
+
+ return roundFloat(v, t.Size())
+}
+
+// trunccmplxlit truncates the real and imaginary parts of the complex
+// literal v to 32-bit or 64-bit precision according to type t; it returns
+// the truncated value. In case of overflow, it reports an error but does
+// not truncate the input value.
+func trunccmplxlit(v constant.Value, t *types.Type) constant.Value {
+ if t.IsUntyped() || overflow(v, t) {
+ // If there was overflow, simply continuing would set the
+ // value to Inf which in turn would lead to spurious follow-on
+ // errors. Avoid this by returning the existing value.
+ return v
+ }
+
+ fsz := t.Size() / 2
+ return makeComplex(roundFloat(constant.Real(v), fsz), roundFloat(constant.Imag(v), fsz))
+}
+
+// TODO(mdempsky): Replace these with better APIs.
+func convlit(n ir.Node, t *types.Type) ir.Node { return convlit1(n, t, false, nil) }
+func DefaultLit(n ir.Node, t *types.Type) ir.Node { return convlit1(n, t, false, nil) }
+
+// convlit1 converts an untyped expression n to type t. If n already
+// has a type, convlit1 has no effect.
+//
+// For explicit conversions, t must be non-nil, and integer-to-string
+// conversions are allowed.
+//
+// For implicit conversions (e.g., assignments), t may be nil; if so,
+// n is converted to its default type.
+//
+// If there's an error converting n to t, context is used in the error
+// message.
+func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir.Node {
+ if explicit && t == nil {
+ base.Fatalf("explicit conversion missing type")
+ }
+ if t != nil && t.IsUntyped() {
+ base.Fatalf("bad conversion to untyped: %v", t)
+ }
+
+ if n == nil || n.Type() == nil {
+ // Allow sloppy callers.
+ return n
+ }
+ if !n.Type().IsUntyped() {
+ // Already typed; nothing to do.
+ return n
+ }
+
+ // Nil is technically not a constant, so handle it specially.
+ if n.Type().Kind() == types.TNIL {
+ if n.Op() != ir.ONIL {
+ base.Fatalf("unexpected op: %v (%v)", n, n.Op())
+ }
+ n = ir.Copy(n)
+ if t == nil {
+ base.Errorf("use of untyped nil")
+ n.SetDiag(true)
+ n.SetType(nil)
+ return n
+ }
+
+ if !t.HasNil() {
+ // Leave for caller to handle.
+ return n
+ }
+
+ n.SetType(t)
+ return n
+ }
+
+ if t == nil || !ir.OKForConst[t.Kind()] {
+ t = defaultType(n.Type())
+ }
+
+ switch n.Op() {
+ default:
+ base.Fatalf("unexpected untyped expression: %v", n)
+
+ case ir.OLITERAL:
+ v := convertVal(n.Val(), t, explicit)
+ if v.Kind() == constant.Unknown {
+ n = ir.NewConstExpr(n.Val(), n)
+ break
+ }
+ n = ir.NewConstExpr(v, n)
+ n.SetType(t)
+ return n
+
+ case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.OREAL, ir.OIMAG:
+ ot := operandType(n.Op(), t)
+ if ot == nil {
+ n = DefaultLit(n, nil)
+ break
+ }
+
+ n := n.(*ir.UnaryExpr)
+ n.X = convlit(n.X, ot)
+ if n.X.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ n.SetType(t)
+ return n
+
+ case ir.OADD, ir.OSUB, ir.OMUL, ir.ODIV, ir.OMOD, ir.OOR, ir.OXOR, ir.OAND, ir.OANDNOT, ir.OOROR, ir.OANDAND, ir.OCOMPLEX:
+ ot := operandType(n.Op(), t)
+ if ot == nil {
+ n = DefaultLit(n, nil)
+ break
+ }
+
+ var l, r ir.Node
+ switch n := n.(type) {
+ case *ir.BinaryExpr:
+ n.X = convlit(n.X, ot)
+ n.Y = convlit(n.Y, ot)
+ l, r = n.X, n.Y
+ case *ir.LogicalExpr:
+ n.X = convlit(n.X, ot)
+ n.Y = convlit(n.Y, ot)
+ l, r = n.X, n.Y
+ }
+
+ if l.Type() == nil || r.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ if !types.Identical(l.Type(), r.Type()) {
+ base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, l.Type(), r.Type())
+ n.SetType(nil)
+ return n
+ }
+
+ n.SetType(t)
+ return n
+
+ case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
+ n := n.(*ir.BinaryExpr)
+ if !t.IsBoolean() {
+ break
+ }
+ n.SetType(t)
+ return n
+
+ case ir.OLSH, ir.ORSH:
+ n := n.(*ir.BinaryExpr)
+ n.X = convlit1(n.X, t, explicit, nil)
+ n.SetType(n.X.Type())
+ if n.Type() != nil && !n.Type().IsInteger() {
+ base.Errorf("invalid operation: %v (shift of type %v)", n, n.Type())
+ n.SetType(nil)
+ }
+ return n
+ }
+
+ if !n.Diag() {
+ if !t.Broke() {
+ if explicit {
+ base.Errorf("cannot convert %L to type %v", n, t)
+ } else if context != nil {
+ base.Errorf("cannot use %L as type %v in %s", n, t, context())
+ } else {
+ base.Errorf("cannot use %L as type %v", n, t)
+ }
+ }
+ n.SetDiag(true)
+ }
+ n.SetType(nil)
+ return n
+}
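The defaulting behavior convlit1 implements is the one visible in ordinary Go source; as a rough standalone illustration (not compiler code):

    package main

    import "fmt"

    func main() {
        const big = 1 << 40 // untyped integer constant, kept exact
        x := big >> 39      // still constant arithmetic; defaults to int here
        f := 1.5            // untyped float constant defaults to float64
        fmt.Println(x, f)   // 2 1.5

        // var b byte = big // does not compile: "constant 1099511627776
        //                  // overflows byte" -- the check overflow() performs.
    }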
+
+func operandType(op ir.Op, t *types.Type) *types.Type {
+ switch op {
+ case ir.OCOMPLEX:
+ if t.IsComplex() {
+ return types.FloatForComplex(t)
+ }
+ case ir.OREAL, ir.OIMAG:
+ if t.IsFloat() {
+ return types.ComplexForFloat(t)
+ }
+ default:
+ if okfor[op][t.Kind()] {
+ return t
+ }
+ }
+ return nil
+}
+
+// convertVal converts v into a representation appropriate for t. If
+// no such representation exists, it returns Val{} instead.
+//
+// If explicit is true, then conversions from integer to string are
+// also allowed.
+func convertVal(v constant.Value, t *types.Type, explicit bool) constant.Value {
+ switch ct := v.Kind(); ct {
+ case constant.Bool:
+ if t.IsBoolean() {
+ return v
+ }
+
+ case constant.String:
+ if t.IsString() {
+ return v
+ }
+
+ case constant.Int:
+ if explicit && t.IsString() {
+ return tostr(v)
+ }
+ fallthrough
+ case constant.Float, constant.Complex:
+ switch {
+ case t.IsInteger():
+ v = toint(v)
+ overflow(v, t)
+ return v
+ case t.IsFloat():
+ v = toflt(v)
+ v = truncfltlit(v, t)
+ return v
+ case t.IsComplex():
+ v = tocplx(v)
+ v = trunccmplxlit(v, t)
+ return v
+ }
+ }
+
+ return constant.MakeUnknown()
+}
+
+func tocplx(v constant.Value) constant.Value {
+ return constant.ToComplex(v)
+}
+
+func toflt(v constant.Value) constant.Value {
+ if v.Kind() == constant.Complex {
+ if constant.Sign(constant.Imag(v)) != 0 {
+ base.Errorf("constant %v truncated to real", v)
+ }
+ v = constant.Real(v)
+ }
+
+ return constant.ToFloat(v)
+}
+
+func toint(v constant.Value) constant.Value {
+ if v.Kind() == constant.Complex {
+ if constant.Sign(constant.Imag(v)) != 0 {
+ base.Errorf("constant %v truncated to integer", v)
+ }
+ v = constant.Real(v)
+ }
+
+ if v := constant.ToInt(v); v.Kind() == constant.Int {
+ return v
+ }
+
+ // The value of v cannot be represented as an integer;
+ // so we need to print an error message.
+ // Unfortunately some float values cannot be
+ // reasonably formatted for inclusion in an error
+ // message (example: 1 + 1e-100), so first we try to
+ // format the float; if the truncation resulted in
+ // something that looks like an integer we omit the
+ // value from the error message.
+ // (See issue #11371).
+ f := ir.BigFloat(v)
+ if f.MantExp(nil) > 2*ir.ConstPrec {
+ base.Errorf("integer too large")
+ } else {
+ var t big.Float
+ t.Parse(fmt.Sprint(v), 0)
+ if t.IsInt() {
+ base.Errorf("constant truncated to integer")
+ } else {
+ base.Errorf("constant %v truncated to integer", v)
+ }
+ }
+
+ // Prevent follow-on errors.
+ // TODO(mdempsky): Use constant.MakeUnknown() instead.
+ return constant.MakeInt64(1)
+}
+
+// overflow reports whether constant value v is too large
+// to represent with type t, and emits an error message if so.
+func overflow(v constant.Value, t *types.Type) bool {
+ // v has already been converted
+ // to appropriate form for t.
+ if t.IsUntyped() {
+ return false
+ }
+ if v.Kind() == constant.Int && constant.BitLen(v) > ir.ConstPrec {
+ base.Errorf("integer too large")
+ return true
+ }
+ if ir.ConstOverflow(v, t) {
+ base.Errorf("constant %v overflows %v", types.FmtConst(v, false), t)
+ return true
+ }
+ return false
+}
+
+func tostr(v constant.Value) constant.Value {
+ if v.Kind() == constant.Int {
+ r := unicode.ReplacementChar
+ if x, ok := constant.Uint64Val(v); ok && x <= unicode.MaxRune {
+ r = rune(x)
+ }
+ v = constant.MakeString(string(r))
+ }
+ return v
+}
+
+var tokenForOp = [...]token.Token{
+ ir.OPLUS: token.ADD,
+ ir.ONEG: token.SUB,
+ ir.ONOT: token.NOT,
+ ir.OBITNOT: token.XOR,
+
+ ir.OADD: token.ADD,
+ ir.OSUB: token.SUB,
+ ir.OMUL: token.MUL,
+ ir.ODIV: token.QUO,
+ ir.OMOD: token.REM,
+ ir.OOR: token.OR,
+ ir.OXOR: token.XOR,
+ ir.OAND: token.AND,
+ ir.OANDNOT: token.AND_NOT,
+ ir.OOROR: token.LOR,
+ ir.OANDAND: token.LAND,
+
+ ir.OEQ: token.EQL,
+ ir.ONE: token.NEQ,
+ ir.OLT: token.LSS,
+ ir.OLE: token.LEQ,
+ ir.OGT: token.GTR,
+ ir.OGE: token.GEQ,
+
+ ir.OLSH: token.SHL,
+ ir.ORSH: token.SHR,
+}
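tokenForOp maps compiler opcodes onto go/constant's token vocabulary so that EvalConst can delegate the arithmetic itself. A standalone sketch of that delegation (not compiler code), including the integer-division special case EvalConst applies below via token.QUO_ASSIGN:

    package main

    import (
        "fmt"
        "go/constant"
        "go/token"
    )

    func main() {
        x := constant.MakeInt64(7)
        y := constant.MakeInt64(3)
        // ir.ODIV on integer types maps to token.QUO_ASSIGN, which
        // go/constant treats as truncated integer division.
        fmt.Println(constant.BinaryOp(x, token.QUO_ASSIGN, y)) // 2
        fmt.Println(constant.BinaryOp(x, token.QUO, y))        // 7/3 (exact rational)
        fmt.Println(constant.Compare(x, token.GTR, y))         // true
    }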
+
+// EvalConst returns a constant-evaluated expression equivalent to n.
+// If n is not a constant, EvalConst returns n.
+// Otherwise, EvalConst returns a new OLITERAL with the same value as n,
+// and with .Orig pointing back to n.
+func EvalConst(n ir.Node) ir.Node {
+ // Pick off just the opcodes that can be constant evaluated.
+ switch n.Op() {
+ case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT:
+ n := n.(*ir.UnaryExpr)
+ nl := n.X
+ if nl.Op() == ir.OLITERAL {
+ var prec uint
+ if n.Type().IsUnsigned() {
+ prec = uint(n.Type().Size() * 8)
+ }
+ return OrigConst(n, constant.UnaryOp(tokenForOp[n.Op()], nl.Val(), prec))
+ }
+
+ case ir.OADD, ir.OSUB, ir.OMUL, ir.ODIV, ir.OMOD, ir.OOR, ir.OXOR, ir.OAND, ir.OANDNOT:
+ n := n.(*ir.BinaryExpr)
+ nl, nr := n.X, n.Y
+ if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
+ rval := nr.Val()
+
+ // check for divisor underflow in complex division (see issue 20227)
+ if n.Op() == ir.ODIV && n.Type().IsComplex() && constant.Sign(square(constant.Real(rval))) == 0 && constant.Sign(square(constant.Imag(rval))) == 0 {
+ base.Errorf("complex division by zero")
+ n.SetType(nil)
+ return n
+ }
+ if (n.Op() == ir.ODIV || n.Op() == ir.OMOD) && constant.Sign(rval) == 0 {
+ base.Errorf("division by zero")
+ n.SetType(nil)
+ return n
+ }
+
+ tok := tokenForOp[n.Op()]
+ if n.Op() == ir.ODIV && n.Type().IsInteger() {
+ tok = token.QUO_ASSIGN // integer division
+ }
+ return OrigConst(n, constant.BinaryOp(nl.Val(), tok, rval))
+ }
+
+ case ir.OOROR, ir.OANDAND:
+ n := n.(*ir.LogicalExpr)
+ nl, nr := n.X, n.Y
+ if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
+ return OrigConst(n, constant.BinaryOp(nl.Val(), tokenForOp[n.Op()], nr.Val()))
+ }
+
+ case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
+ n := n.(*ir.BinaryExpr)
+ nl, nr := n.X, n.Y
+ if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
+ return OrigBool(n, constant.Compare(nl.Val(), tokenForOp[n.Op()], nr.Val()))
+ }
+
+ case ir.OLSH, ir.ORSH:
+ n := n.(*ir.BinaryExpr)
+ nl, nr := n.X, n.Y
+ if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
+ // shiftBound from go/types; "so we can express smallestFloat64"
+ const shiftBound = 1023 - 1 + 52
+ s, ok := constant.Uint64Val(nr.Val())
+ if !ok || s > shiftBound {
+ base.Errorf("invalid shift count %v", nr)
+ n.SetType(nil)
+ break
+ }
+ return OrigConst(n, constant.Shift(toint(nl.Val()), tokenForOp[n.Op()], uint(s)))
+ }
+
+ case ir.OCONV, ir.ORUNESTR:
+ n := n.(*ir.ConvExpr)
+ nl := n.X
+ if ir.OKForConst[n.Type().Kind()] && nl.Op() == ir.OLITERAL {
+ return OrigConst(n, convertVal(nl.Val(), n.Type(), true))
+ }
+
+ case ir.OCONVNOP:
+ n := n.(*ir.ConvExpr)
+ nl := n.X
+ if ir.OKForConst[n.Type().Kind()] && nl.Op() == ir.OLITERAL {
+ // set so n.Orig gets OCONV instead of OCONVNOP
+ n.SetOp(ir.OCONV)
+ return OrigConst(n, nl.Val())
+ }
+
+ case ir.OADDSTR:
+ // Merge adjacent constants in the argument list.
+ n := n.(*ir.AddStringExpr)
+ s := n.List
+ need := 0
+ for i := 0; i < len(s); i++ {
+ if i == 0 || !ir.IsConst(s[i-1], constant.String) || !ir.IsConst(s[i], constant.String) {
+ // Can't merge s[i] into s[i-1]; need a slot in the list.
+ need++
+ }
+ }
+ if need == len(s) {
+ return n
+ }
+ if need == 1 {
+ var strs []string
+ for _, c := range s {
+ strs = append(strs, ir.StringVal(c))
+ }
+ return OrigConst(n, constant.MakeString(strings.Join(strs, "")))
+ }
+ newList := make([]ir.Node, 0, need)
+ for i := 0; i < len(s); i++ {
+ if ir.IsConst(s[i], constant.String) && i+1 < len(s) && ir.IsConst(s[i+1], constant.String) {
+ // merge from i up to but not including i2
+ var strs []string
+ i2 := i
+ for i2 < len(s) && ir.IsConst(s[i2], constant.String) {
+ strs = append(strs, ir.StringVal(s[i2]))
+ i2++
+ }
+
+ nl := ir.Copy(n).(*ir.AddStringExpr)
+ nl.List.Set(s[i:i2])
+ newList = append(newList, OrigConst(nl, constant.MakeString(strings.Join(strs, ""))))
+ i = i2 - 1
+ } else {
+ newList = append(newList, s[i])
+ }
+ }
+
+ nn := ir.Copy(n).(*ir.AddStringExpr)
+ nn.List.Set(newList)
+ return nn
+
+ case ir.OCAP, ir.OLEN:
+ n := n.(*ir.UnaryExpr)
+ nl := n.X
+ switch nl.Type().Kind() {
+ case types.TSTRING:
+ if ir.IsConst(nl, constant.String) {
+ return OrigInt(n, int64(len(ir.StringVal(nl))))
+ }
+ case types.TARRAY:
+ if !anyCallOrChan(nl) {
+ return OrigInt(n, nl.Type().NumElem())
+ }
+ }
+
+ case ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
+ n := n.(*ir.UnaryExpr)
+ return OrigInt(n, evalunsafe(n))
+
+ case ir.OREAL:
+ n := n.(*ir.UnaryExpr)
+ nl := n.X
+ if nl.Op() == ir.OLITERAL {
+ return OrigConst(n, constant.Real(nl.Val()))
+ }
+
+ case ir.OIMAG:
+ n := n.(*ir.UnaryExpr)
+ nl := n.X
+ if nl.Op() == ir.OLITERAL {
+ return OrigConst(n, constant.Imag(nl.Val()))
+ }
+
+ case ir.OCOMPLEX:
+ n := n.(*ir.BinaryExpr)
+ nl, nr := n.X, n.Y
+ if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
+ return OrigConst(n, makeComplex(nl.Val(), nr.Val()))
+ }
+ }
+
+ return n
+}
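+
+// sketchFold mirrors the binary-op folding above using go/constant
+// directly (illustrative sketch only). Note the token.QUO_ASSIGN trick
+// used by EvalConst: go/constant interprets it as truncated integer
+// division, whereas token.QUO is exact (possibly non-integer) division.
+func sketchFold() {
+ x, y := constant.MakeInt64(7), constant.MakeInt64(2)
+ _ = constant.BinaryOp(x, token.QUO, y) // 7/2, a Float-kind rational
+ _ = constant.BinaryOp(x, token.QUO_ASSIGN, y) // 3, an Int
+}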
+
+func makeInt(i *big.Int) constant.Value {
+ if i.IsInt64() {
+ return constant.Make(i.Int64()) // workaround #42640 (Int64Val(Make(big.NewInt(10))) returns (10, false), not (10, true))
+ }
+ return constant.Make(i)
+}
+
+func makeFloat64(f float64) constant.Value {
+ if math.IsInf(f, 0) {
+ base.Fatalf("infinity is not a valid constant")
+ }
+ v := constant.MakeFloat64(f)
+ v = constant.ToFloat(v) // workaround #42641 (MakeFloat64(0).Kind() returns Int, not Float)
+ return v
+}
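+
+// sketchFloatKind illustrates the workaround above (illustrative sketch
+// only). Per the #42641 note, constant.MakeFloat64(0) comes back with
+// Kind Int, so makeFloat64 re-normalizes with ToFloat.
+func sketchFloatKind() {
+ _ = constant.MakeFloat64(0).Kind() // constant.Int, surprisingly
+ _ = makeFloat64(0).Kind() // always constant.Float
+}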
+
+func makeComplex(real, imag constant.Value) constant.Value {
+ return constant.BinaryOp(constant.ToFloat(real), token.ADD, constant.MakeImag(constant.ToFloat(imag)))
+}
+
+func square(x constant.Value) constant.Value {
+ return constant.BinaryOp(x, token.MUL, x)
+}
+
+// For matching historical "constant OP overflow" error messages.
+// TODO(mdempsky): Replace with error messages like go/types uses.
+var overflowNames = [...]string{
+ ir.OADD: "addition",
+ ir.OSUB: "subtraction",
+ ir.OMUL: "multiplication",
+ ir.OLSH: "shift",
+ ir.OXOR: "bitwise XOR",
+ ir.OBITNOT: "bitwise complement",
+}
+
+// OrigConst returns an OLITERAL with orig n and value v.
+func OrigConst(n ir.Node, v constant.Value) ir.Node {
+ lno := ir.SetPos(n)
+ v = convertVal(v, n.Type(), false)
+ base.Pos = lno
+
+ switch v.Kind() {
+ case constant.Int:
+ if constant.BitLen(v) <= ir.ConstPrec {
+ break
+ }
+ fallthrough
+ case constant.Unknown:
+ what := overflowNames[n.Op()]
+ if what == "" {
+ base.Fatalf("unexpected overflow: %v", n.Op())
+ }
+ base.ErrorfAt(n.Pos(), "constant %v overflow", what)
+ n.SetType(nil)
+ return n
+ }
+
+ return ir.NewConstExpr(v, n)
+}
+
+func OrigBool(n ir.Node, v bool) ir.Node {
+ return OrigConst(n, constant.MakeBool(v))
+}
+
+func OrigInt(n ir.Node, v int64) ir.Node {
+ return OrigConst(n, constant.MakeInt64(v))
+}
+
+// defaultlit2 converts l and r to their default types simultaneously;
+// if both are ideal (untyped) going in, they must come out with the
+// same type.
+// force means a concrete (non-ideal) type must be assigned.
+// The results of defaultlit2 MUST be assigned back to l and r, e.g.
+// n.Left, n.Right = defaultlit2(n.Left, n.Right, force)
+func defaultlit2(l ir.Node, r ir.Node, force bool) (ir.Node, ir.Node) {
+ if l.Type() == nil || r.Type() == nil {
+ return l, r
+ }
+ if !l.Type().IsUntyped() {
+ r = convlit(r, l.Type())
+ return l, r
+ }
+
+ if !r.Type().IsUntyped() {
+ l = convlit(l, r.Type())
+ return l, r
+ }
+
+ if !force {
+ return l, r
+ }
+
+ // Can't mix bool with non-bool, string with non-string, or nil with anything (untyped).
+ if l.Type().IsBoolean() != r.Type().IsBoolean() {
+ return l, r
+ }
+ if l.Type().IsString() != r.Type().IsString() {
+ return l, r
+ }
+ if ir.IsNil(l) || ir.IsNil(r) {
+ return l, r
+ }
+
+ t := defaultType(mixUntyped(l.Type(), r.Type()))
+ l = convlit(l, t)
+ r = convlit(r, t)
+ return l, r
+}
+
+func mixUntyped(t1, t2 *types.Type) *types.Type {
+ if t1 == t2 {
+ return t1
+ }
+
+ rank := func(t *types.Type) int {
+ switch t {
+ case types.UntypedInt:
+ return 0
+ case types.UntypedRune:
+ return 1
+ case types.UntypedFloat:
+ return 2
+ case types.UntypedComplex:
+ return 3
+ }
+ base.Fatalf("bad type %v", t)
+ panic("unreachable")
+ }
+
+ if rank(t2) > rank(t1) {
+ return t2
+ }
+ return t1
+}
+
+func defaultType(t *types.Type) *types.Type {
+ if !t.IsUntyped() || t.Kind() == types.TNIL {
+ return t
+ }
+
+ switch t {
+ case types.UntypedBool:
+ return types.Types[types.TBOOL]
+ case types.UntypedString:
+ return types.Types[types.TSTRING]
+ case types.UntypedInt:
+ return types.Types[types.TINT]
+ case types.UntypedRune:
+ return types.RuneType
+ case types.UntypedFloat:
+ return types.Types[types.TFLOAT64]
+ case types.UntypedComplex:
+ return types.Types[types.TCOMPLEX128]
+ }
+
+ base.Fatalf("bad type %v", t)
+ return nil
+}
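+
+// sketchDefaulting shows the two helpers above composing (illustrative
+// sketch only): mixUntyped keeps the higher-ranked untyped kind, and
+// defaultType maps it to its concrete default.
+func sketchDefaulting() {
+ t := mixUntyped(types.UntypedInt, types.UntypedFloat)
+ _ = t == types.UntypedFloat // true: float outranks int
+ _ = defaultType(t) == types.Types[types.TFLOAT64] // true
+ _ = defaultType(types.UntypedRune) == types.RuneType // true
+}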
+
+// IndexConst checks if Node n contains a constant expression
+// representable as a non-negative int and returns its value.
+// If n is not a constant expression, not representable as an
+// integer, or negative, it returns -1. If n is too large, it
+// returns -2.
+func IndexConst(n ir.Node) int64 {
+ if n.Op() != ir.OLITERAL {
+ return -1
+ }
+ if !n.Type().IsInteger() && n.Type().Kind() != types.TIDEAL {
+ return -1
+ }
+
+ v := toint(n.Val())
+ if v.Kind() != constant.Int || constant.Sign(v) < 0 {
+ return -1
+ }
+ if ir.ConstOverflow(v, types.Types[types.TINT]) {
+ return -2
+ }
+ return ir.IntVal(types.Types[types.TINT], v)
+}
+
+// anyCallOrChan reports whether n contains any calls or channel operations.
+func anyCallOrChan(n ir.Node) bool {
+ return ir.Any(n, func(n ir.Node) bool {
+ switch n.Op() {
+ case ir.OAPPEND,
+ ir.OCALL,
+ ir.OCALLFUNC,
+ ir.OCALLINTER,
+ ir.OCALLMETH,
+ ir.OCAP,
+ ir.OCLOSE,
+ ir.OCOMPLEX,
+ ir.OCOPY,
+ ir.ODELETE,
+ ir.OIMAG,
+ ir.OLEN,
+ ir.OMAKE,
+ ir.ONEW,
+ ir.OPANIC,
+ ir.OPRINT,
+ ir.OPRINTN,
+ ir.OREAL,
+ ir.ORECOVER,
+ ir.ORECV:
+ return true
+ }
+ return false
+ })
+}
+
+// A constSet represents a set of Go constant expressions.
+type constSet struct {
+ m map[constSetKey]src.XPos
+}
+
+type constSetKey struct {
+ typ *types.Type
+ val interface{}
+}
+
+// add adds constant expression n to s. If a constant expression of
+// equal value and identical type has already been added, then add
+// reports an error about the duplicate value.
+//
+// pos provides position information for where expression n occurred
+// (in case n does not have its own position information). what and
+// where are used in the error message.
+//
+// n must not be an untyped constant.
+func (s *constSet) add(pos src.XPos, n ir.Node, what, where string) {
+ if conv := n; conv.Op() == ir.OCONVIFACE {
+ conv := conv.(*ir.ConvExpr)
+ if conv.Implicit() {
+ n = conv.X
+ }
+ }
+
+ if !ir.IsConstNode(n) {
+ return
+ }
+ if n.Type().IsUntyped() {
+ base.Fatalf("%v is untyped", n)
+ }
+
+ // Consts are only duplicates if they have the same value and
+ // identical types.
+ //
+ // In general, we have to use types.Identical to test type
+ // identity, because == gives false negatives for anonymous
+ // types and the byte/uint8 and rune/int32 builtin type
+ // aliases. However, this is not a problem here, because
+ // constant expressions are always untyped or have a named
+ // type, and we explicitly handle the builtin type aliases
+ // below.
+ //
+ // This approach may need to be revisited though if we fix
+ // #21866 by treating all type aliases like byte/uint8 and
+ // rune/int32.
+
+ typ := n.Type()
+ switch typ {
+ case types.ByteType:
+ typ = types.Types[types.TUINT8]
+ case types.RuneType:
+ typ = types.Types[types.TINT32]
+ }
+ k := constSetKey{typ, ir.ConstValue(n)}
+
+ if ir.HasUniquePos(n) {
+ pos = n.Pos()
+ }
+
+ if s.m == nil {
+ s.m = make(map[constSetKey]src.XPos)
+ }
+
+ if prevPos, isDup := s.m[k]; isDup {
+ base.ErrorfAt(pos, "duplicate %s %s in %s\n\tprevious %s at %v",
+ what, nodeAndVal(n), where,
+ what, base.FmtPos(prevPos))
+ } else {
+ s.m[k] = pos
+ }
+}
+
+// nodeAndVal reports both an expression and its constant value, if
+// the latter is non-obvious.
+//
+// TODO(mdempsky): This could probably be a fmt.go flag.
+func nodeAndVal(n ir.Node) string {
+ show := fmt.Sprint(n)
+ val := ir.ConstValue(n)
+ if s := fmt.Sprintf("%#v", val); show != s {
+ show += " (value " + s + ")"
+ }
+ return show
+}
+
+// evalunsafe evaluates a package unsafe operation and returns the result.
+func evalunsafe(n ir.Node) int64 {
+ switch n.Op() {
+ case ir.OALIGNOF, ir.OSIZEOF:
+ n := n.(*ir.UnaryExpr)
+ n.X = Expr(n.X)
+ n.X = DefaultLit(n.X, nil)
+ tr := n.X.Type()
+ if tr == nil {
+ return 0
+ }
+ types.CalcSize(tr)
+ if n.Op() == ir.OALIGNOF {
+ return int64(tr.Align)
+ }
+ return tr.Width
+
+ case ir.OOFFSETOF:
+ // must be a selector.
+ n := n.(*ir.UnaryExpr)
+ if n.X.Op() != ir.OXDOT {
+ base.Errorf("invalid expression %v", n)
+ return 0
+ }
+ sel := n.X.(*ir.SelectorExpr)
+
+ // Remember base of selector to find it back after dot insertion.
+ // Since sel.X may be mutated by typechecking, check it explicitly
+ // first to track it correctly.
+ sel.X = Expr(sel.X)
+ sbase := sel.X
+
+ tsel := Expr(sel)
+ n.X = tsel
+ if tsel.Type() == nil {
+ return 0
+ }
+ switch tsel.Op() {
+ case ir.ODOT, ir.ODOTPTR:
+ break
+ case ir.OCALLPART:
+ base.Errorf("invalid expression %v: argument is a method value", n)
+ return 0
+ default:
+ base.Errorf("invalid expression %v", n)
+ return 0
+ }
+
+ // Sum offsets for dots until we reach sbase.
+ var v int64
+ var next ir.Node
+ for r := tsel; r != sbase; r = next {
+ switch r.Op() {
+ case ir.ODOTPTR:
+ // For Offsetof(s.f), s may itself be a pointer,
+ // but accessing f must not otherwise involve
+ // indirection via embedded pointer types.
+ r := r.(*ir.SelectorExpr)
+ if r.X != sbase {
+ base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.X)
+ return 0
+ }
+ fallthrough
+ case ir.ODOT:
+ r := r.(*ir.SelectorExpr)
+ v += r.Offset
+ next = r.X
+ default:
+ ir.Dump("unsafenmagic", tsel)
+ base.Fatalf("impossible %v node after dot insertion", r.Op())
+ }
+ }
+ return v
+ }
+
+ base.Fatalf("unexpected op %v", n.Op())
+ return 0
+}
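+
+// The ODOTPTR check above is what rejects selectors that reach a field
+// through an embedded pointer, since unsafe.Offsetof must be a
+// compile-time constant. A user-level sketch (hypothetical types):
+//
+// type Inner struct{ F int }
+// type ByVal struct{ Inner }
+// type ByPtr struct{ *Inner }
+//
+// unsafe.Offsetof(ByVal{}.F) // ok: F is at a fixed offset in ByVal
+// unsafe.Offsetof(ByPtr{}.F) // error: selector implies indirection of embedded *Inner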
diff --git a/src/cmd/compile/internal/typecheck/dcl.go b/src/cmd/compile/internal/typecheck/dcl.go
new file mode 100644
index 0000000000..9f66d0fa17
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/dcl.go
@@ -0,0 +1,705 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typecheck
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+ "cmd/internal/src"
+)
+
+var DeclContext ir.Class // PEXTERN/PAUTO
+
+func AssignDefn(left []ir.Node, defn ir.Node) {
+ for _, n := range left {
+ if n.Sym() != nil {
+ n.Sym().SetUniq(true)
+ }
+ }
+
+ var nnew, nerr int
+ for i, n := range left {
+ if ir.IsBlank(n) {
+ continue
+ }
+ if !assignableName(n) {
+ base.ErrorfAt(defn.Pos(), "non-name %v on left side of :=", n)
+ nerr++
+ continue
+ }
+
+ if !n.Sym().Uniq() {
+ base.ErrorfAt(defn.Pos(), "%v repeated on left side of :=", n.Sym())
+ n.SetDiag(true)
+ nerr++
+ continue
+ }
+
+ n.Sym().SetUniq(false)
+ if n.Sym().Block == types.Block {
+ continue
+ }
+
+ nnew++
+ n := NewName(n.Sym())
+ Declare(n, DeclContext)
+ n.Defn = defn
+ defn.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n))
+ left[i] = n
+ }
+
+ if nnew == 0 && nerr == 0 {
+ base.ErrorfAt(defn.Pos(), "no new variables on left side of :=")
+ }
+}
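+
+// A user-level sketch of the rules AssignDefn enforces (illustrative
+// only):
+//
+// x, y := 1, 2 // ok: two new variables
+// x, z := 3, 4 // ok: z is new, x is merely reassigned
+// x, y := 5, 6 // error: no new variables on left side of :=
+// a, a := 7, 8 // error: a repeated on left side of :=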
+
+// assignableName reports whether n is a name that can appear
+// on the left side of := as a newly declared variable.
+func assignableName(n ir.Node) bool {
+ switch n.Op() {
+ case ir.ONAME,
+ ir.ONONAME,
+ ir.OPACK,
+ ir.OTYPE,
+ ir.OLITERAL:
+ return n.Sym() != nil
+ }
+
+ return false
+}
+
+func DeclFunc(sym *types.Sym, tfn ir.Ntype) *ir.Func {
+ if tfn.Op() != ir.OTFUNC {
+ base.Fatalf("expected OTFUNC node, got %v", tfn)
+ }
+
+ fn := ir.NewFunc(base.Pos)
+ fn.Nname = ir.NewFuncNameAt(base.Pos, sym, fn)
+ fn.Nname.Defn = fn
+ fn.Nname.Ntype = tfn
+ ir.MarkFunc(fn.Nname)
+ StartFuncBody(fn)
+ fn.Nname.Ntype = typecheckNtype(fn.Nname.Ntype)
+ return fn
+}
+
+// declare variables from grammar
+// new_name_list (type | [type] = expr_list)
+func DeclVars(vl []*ir.Name, t ir.Ntype, el []ir.Node) []ir.Node {
+ var init []ir.Node
+ doexpr := len(el) > 0
+
+ if len(el) == 1 && len(vl) > 1 {
+ e := el[0]
+ as2 := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
+ as2.Rhs = []ir.Node{e}
+ for _, v := range vl {
+ as2.Lhs.Append(v)
+ Declare(v, DeclContext)
+ v.Ntype = t
+ v.Defn = as2
+ if ir.CurFunc != nil {
+ init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v))
+ }
+ }
+
+ return append(init, as2)
+ }
+
+ for i, v := range vl {
+ var e ir.Node
+ if doexpr {
+ if i >= len(el) {
+ base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el))
+ break
+ }
+ e = el[i]
+ }
+
+ Declare(v, DeclContext)
+ v.Ntype = t
+
+ if e != nil || ir.CurFunc != nil || ir.IsBlank(v) {
+ if ir.CurFunc != nil {
+ init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v))
+ }
+ as := ir.NewAssignStmt(base.Pos, v, e)
+ init = append(init, as)
+ if e != nil {
+ v.Defn = as
+ }
+ }
+ }
+
+ if len(el) > len(vl) {
+ base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el))
+ }
+ return init
+}
+
+// Declare records that Node n declares symbol n.Sym in the specified
+// declaration context.
+func Declare(n *ir.Name, ctxt ir.Class) {
+ if ir.IsBlank(n) {
+ return
+ }
+
+ s := n.Sym()
+
+ // kludgy: TypecheckAllowed means we're past parsing. E.g., genwrapper may declare out-of-package names later.
+ if !inimport && !TypecheckAllowed && s.Pkg != types.LocalPkg {
+ base.ErrorfAt(n.Pos(), "cannot declare name %v", s)
+ }
+
+ gen := 0
+ if ctxt == ir.PEXTERN {
+ if s.Name == "init" {
+ base.ErrorfAt(n.Pos(), "cannot declare init - must be func")
+ }
+ if s.Name == "main" && s.Pkg.Name == "main" {
+ base.ErrorfAt(n.Pos(), "cannot declare main - must be func")
+ }
+ Target.Externs = append(Target.Externs, n)
+ } else {
+ if ir.CurFunc == nil && ctxt == ir.PAUTO {
+ base.Pos = n.Pos()
+ base.Fatalf("automatic outside function")
+ }
+ if ir.CurFunc != nil && ctxt != ir.PFUNC && n.Op() == ir.ONAME {
+ ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
+ }
+ if n.Op() == ir.OTYPE {
+ declare_typegen++
+ gen = declare_typegen
+ } else if n.Op() == ir.ONAME && ctxt == ir.PAUTO && !strings.Contains(s.Name, "·") {
+ vargen++
+ gen = vargen
+ }
+ types.Pushdcl(s)
+ n.Curfn = ir.CurFunc
+ }
+
+ if ctxt == ir.PAUTO {
+ n.SetFrameOffset(0)
+ }
+
+ if s.Block == types.Block {
+ // NewFuncType will print errors about duplicate function arguments.
+ // Don't repeat the error here.
+ if ctxt != ir.PPARAM && ctxt != ir.PPARAMOUT {
+ Redeclared(n.Pos(), s, "in this block")
+ }
+ }
+
+ s.Block = types.Block
+ s.Lastlineno = base.Pos
+ s.Def = n
+ n.Vargen = int32(gen)
+ n.Class_ = ctxt
+ if ctxt == ir.PFUNC {
+ n.Sym().SetFunc(true)
+ }
+
+ autoexport(n, ctxt)
+}
+
+// Export marks n for export (or reexport).
+func Export(n *ir.Name) {
+ if n.Sym().OnExportList() {
+ return
+ }
+ n.Sym().SetOnExportList(true)
+
+ if base.Flag.E != 0 {
+ fmt.Printf("export symbol %v\n", n.Sym())
+ }
+
+ Target.Exports = append(Target.Exports, n)
+}
+
+// Redeclared emits a diagnostic about symbol s being redeclared at pos.
+func Redeclared(pos src.XPos, s *types.Sym, where string) {
+ if !s.Lastlineno.IsKnown() {
+ pkgName := DotImportRefs[s.Def.(*ir.Ident)]
+ base.ErrorfAt(pos, "%v redeclared %s\n"+
+ "\t%v: previous declaration during import %q", s, where, base.FmtPos(pkgName.Pos()), pkgName.Pkg.Path)
+ } else {
+ prevPos := s.Lastlineno
+
+ // When an import and a declaration collide in separate files,
+ // present the import as the "redeclared", because the declaration
+ // is visible where the import is, but not vice versa.
+ // See issue 4510.
+ if s.Def == nil {
+ pos, prevPos = prevPos, pos
+ }
+
+ base.ErrorfAt(pos, "%v redeclared %s\n"+
+ "\t%v: previous declaration", s, where, base.FmtPos(prevPos))
+ }
+}
+
+// StartFuncBody declares the function proper and its arguments.
+// It is called in extern-declaration context and returns
+// in auto-declaration context.
+func StartFuncBody(fn *ir.Func) {
+ // change the declaration context from extern to auto
+ funcStack = append(funcStack, funcStackEnt{ir.CurFunc, DeclContext})
+ ir.CurFunc = fn
+ DeclContext = ir.PAUTO
+
+ types.Markdcl()
+
+ if fn.Nname.Ntype != nil {
+ funcargs(fn.Nname.Ntype.(*ir.FuncType))
+ } else {
+ funcargs2(fn.Type())
+ }
+}
+
+// FinishFuncBody finishes the function body.
+// It is called in auto-declaration context and returns
+// in extern-declaration context.
+func FinishFuncBody() {
+ // change the declaration context from auto to previous context
+ types.Popdcl()
+ var e funcStackEnt
+ funcStack, e = funcStack[:len(funcStack)-1], funcStack[len(funcStack)-1]
+ ir.CurFunc, DeclContext = e.curfn, e.dclcontext
+}
+
+func CheckFuncStack() {
+ if len(funcStack) != 0 {
+ base.Fatalf("funcStack is non-empty: %v", len(funcStack))
+ }
+}
+
+// NewFuncType turns a parsed function declaration into a function type.
+func NewFuncType(nrecv *ir.Field, nparams, nresults []*ir.Field) *types.Type {
+ funarg := func(n *ir.Field) *types.Field {
+ lno := base.Pos
+ base.Pos = n.Pos
+
+ if n.Ntype != nil {
+ n.Type = typecheckNtype(n.Ntype).Type()
+ n.Ntype = nil
+ }
+
+ f := types.NewField(n.Pos, n.Sym, n.Type)
+ f.SetIsDDD(n.IsDDD)
+ if n.Decl != nil {
+ n.Decl.SetType(f.Type)
+ f.Nname = n.Decl
+ }
+
+ base.Pos = lno
+ return f
+ }
+ funargs := func(nn []*ir.Field) []*types.Field {
+ res := make([]*types.Field, len(nn))
+ for i, n := range nn {
+ res[i] = funarg(n)
+ }
+ return res
+ }
+
+ var recv *types.Field
+ if nrecv != nil {
+ recv = funarg(nrecv)
+ }
+
+ t := types.NewSignature(types.LocalPkg, recv, funargs(nparams), funargs(nresults))
+ checkdupfields("argument", t.Recvs().FieldSlice(), t.Params().FieldSlice(), t.Results().FieldSlice())
+ return t
+}
+
+// NewStructType converts a parsed id/type list into a struct type.
+func NewStructType(l []*ir.Field) *types.Type {
+ lno := base.Pos
+
+ fields := make([]*types.Field, len(l))
+ for i, n := range l {
+ base.Pos = n.Pos
+
+ if n.Ntype != nil {
+ n.Type = typecheckNtype(n.Ntype).Type()
+ n.Ntype = nil
+ }
+ f := types.NewField(n.Pos, n.Sym, n.Type)
+ if n.Embedded {
+ checkembeddedtype(n.Type)
+ f.Embedded = 1
+ }
+ f.Note = n.Note
+ fields[i] = f
+ }
+ checkdupfields("field", fields)
+
+ base.Pos = lno
+ return types.NewStruct(types.LocalPkg, fields)
+}
+
+// addmethod adds a method, declared as a function, to the receiver's
+// base type.
+// - msym is the method symbol
+// - t is the function type (with receiver)
+// Returns a pointer to the existing or added Field; or nil if there's an error.
+func addmethod(n *ir.Func, msym *types.Sym, t *types.Type, local, nointerface bool) *types.Field {
+ if msym == nil {
+ base.Fatalf("no method symbol")
+ }
+
+ // get parent type sym
+ rf := t.Recv() // ptr to this structure
+ if rf == nil {
+ base.Errorf("missing receiver")
+ return nil
+ }
+
+ mt := types.ReceiverBaseType(rf.Type)
+ if mt == nil || mt.Sym() == nil {
+ pa := rf.Type
+ t := pa
+ if t != nil && t.IsPtr() {
+ if t.Sym() != nil {
+ base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t)
+ return nil
+ }
+ t = t.Elem()
+ }
+
+ switch {
+ case t == nil || t.Broke():
+ // rely on typecheck having complained before
+ case t.Sym() == nil:
+ base.Errorf("invalid receiver type %v (%v is not a defined type)", pa, t)
+ case t.IsPtr():
+ base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t)
+ case t.IsInterface():
+ base.Errorf("invalid receiver type %v (%v is an interface type)", pa, t)
+ default:
+ // Should have picked off all the reasons above,
+ // but just in case, fall back to generic error.
+ base.Errorf("invalid receiver type %v (%L / %L)", pa, pa, t)
+ }
+ return nil
+ }
+
+ if local && mt.Sym().Pkg != types.LocalPkg {
+ base.Errorf("cannot define new methods on non-local type %v", mt)
+ return nil
+ }
+
+ if msym.IsBlank() {
+ return nil
+ }
+
+ if mt.IsStruct() {
+ for _, f := range mt.Fields().Slice() {
+ if f.Sym == msym {
+ base.Errorf("type %v has both field and method named %v", mt, msym)
+ f.SetBroke(true)
+ return nil
+ }
+ }
+ }
+
+ for _, f := range mt.Methods().Slice() {
+ if msym.Name != f.Sym.Name {
+ continue
+ }
+ // types.Identical only checks that incoming and result parameters match,
+ // so explicitly check that the receiver parameters match too.
+ if !types.Identical(t, f.Type) || !types.Identical(t.Recv().Type, f.Type.Recv().Type) {
+ base.Errorf("method redeclared: %v.%v\n\t%v\n\t%v", mt, msym, f.Type, t)
+ }
+ return f
+ }
+
+ f := types.NewField(base.Pos, msym, t)
+ f.Nname = n.Nname
+ f.SetNointerface(nointerface)
+
+ mt.Methods().Append(f)
+ return f
+}
+
+func autoexport(n *ir.Name, ctxt ir.Class) {
+ if n.Sym().Pkg != types.LocalPkg {
+ return
+ }
+ if (ctxt != ir.PEXTERN && ctxt != ir.PFUNC) || DeclContext != ir.PEXTERN {
+ return
+ }
+ if n.Type() != nil && n.Type().IsKind(types.TFUNC) && ir.IsMethod(n) {
+ return
+ }
+
+ if types.IsExported(n.Sym().Name) || initname(n.Sym().Name) {
+ Export(n)
+ }
+ if base.Flag.AsmHdr != "" && !n.Sym().Asm() {
+ n.Sym().SetAsm(true)
+ Target.Asms = append(Target.Asms, n)
+ }
+}
+
+// checkdupfields emits errors for duplicately named fields or methods in
+// a list of struct or interface types.
+func checkdupfields(what string, fss ...[]*types.Field) {
+ seen := make(map[*types.Sym]bool)
+ for _, fs := range fss {
+ for _, f := range fs {
+ if f.Sym == nil || f.Sym.IsBlank() {
+ continue
+ }
+ if seen[f.Sym] {
+ base.ErrorfAt(f.Pos, "duplicate %s %s", what, f.Sym.Name)
+ continue
+ }
+ seen[f.Sym] = true
+ }
+ }
+}
+
+// checkembeddedtype checks restrictions on t as an embedded field type.
+func checkembeddedtype(t *types.Type) {
+ if t == nil {
+ return
+ }
+
+ if t.Sym() == nil && t.IsPtr() {
+ t = t.Elem()
+ if t.IsInterface() {
+ base.Errorf("embedded type cannot be a pointer to interface")
+ }
+ }
+
+ if t.IsPtr() || t.IsUnsafePtr() {
+ base.Errorf("embedded type cannot be a pointer")
+ } else if t.Kind() == types.TFORW && !t.ForwardType().Embedlineno.IsKnown() {
+ t.ForwardType().Embedlineno = base.Pos
+ }
+}
+
+// declare individual names - var, typ, const
+
+var declare_typegen int
+
+func fakeRecvField() *types.Field {
+ return types.NewField(src.NoXPos, nil, types.FakeRecvType())
+}
+
+var funcStack []funcStackEnt // stack of previous values of Curfn/dclcontext
+
+type funcStackEnt struct {
+ curfn *ir.Func
+ dclcontext ir.Class
+}
+
+func funcarg(n *ir.Field, ctxt ir.Class) {
+ if n.Sym == nil {
+ return
+ }
+
+ name := ir.NewNameAt(n.Pos, n.Sym)
+ n.Decl = name
+ name.Ntype = n.Ntype
+ name.SetIsDDD(n.IsDDD)
+ Declare(name, ctxt)
+
+ vargen++
+ n.Decl.Vargen = int32(vargen)
+}
+
+func funcarg2(f *types.Field, ctxt ir.Class) {
+ if f.Sym == nil {
+ return
+ }
+ n := ir.NewNameAt(f.Pos, f.Sym)
+ f.Nname = n
+ n.SetType(f.Type)
+ n.SetIsDDD(f.IsDDD())
+ Declare(n, ctxt)
+}
+
+func funcargs(nt *ir.FuncType) {
+ if nt.Op() != ir.OTFUNC {
+ base.Fatalf("funcargs %v", nt.Op())
+ }
+
+ // re-start the variable generation number
+ // we want to use small numbers for the return variables,
+ // so let them have the chunk starting at 1.
+ //
+ // TODO(mdempsky): This is ugly, and only necessary because
+ // esc.go uses Vargen to figure out result parameters' index
+ // within the result tuple.
+ vargen = len(nt.Results)
+
+ // declare the receiver and in arguments.
+ if nt.Recv != nil {
+ funcarg(nt.Recv, ir.PPARAM)
+ }
+ for _, n := range nt.Params {
+ funcarg(n, ir.PPARAM)
+ }
+
+ oldvargen := vargen
+ vargen = 0
+
+ // declare the out arguments.
+ gen := len(nt.Params)
+ for _, n := range nt.Results {
+ if n.Sym == nil {
+ // Name so that escape analysis can track it. ~r stands for 'result'.
+ n.Sym = LookupNum("~r", gen)
+ gen++
+ }
+ if n.Sym.IsBlank() {
+ // Give it a name so we can assign to it during return. ~b stands for 'blank'.
+ // The name must be different from ~r above because if you have
+ // func f() (_ int)
+ // func g() int
+ // f is allowed to use a plain 'return' with no arguments, while g is not.
+ // So the two cases must be distinguished.
+ n.Sym = LookupNum("~b", gen)
+ gen++
+ }
+
+ funcarg(n, ir.PPARAMOUT)
+ }
+
+ vargen = oldvargen
+}
+
+// funcargs2 is the same as funcargs, except it runs over an already
+// constructed TFUNC. This happens during import, where the function's
+// type has been constructed directly rather than from syntax.
+func funcargs2(t *types.Type) {
+ if t.Kind() != types.TFUNC {
+ base.Fatalf("funcargs2 %v", t)
+ }
+
+ for _, f := range t.Recvs().Fields().Slice() {
+ funcarg2(f, ir.PPARAM)
+ }
+ for _, f := range t.Params().Fields().Slice() {
+ funcarg2(f, ir.PPARAM)
+ }
+ for _, f := range t.Results().Fields().Slice() {
+ funcarg2(f, ir.PPARAMOUT)
+ }
+}
+
+func initname(s string) bool {
+ return s == "init"
+}
+
+func tointerface(nmethods []*ir.Field) *types.Type {
+ if len(nmethods) == 0 {
+ return types.Types[types.TINTER]
+ }
+
+ lno := base.Pos
+
+ methods := make([]*types.Field, len(nmethods))
+ for i, n := range nmethods {
+ base.Pos = n.Pos
+ if n.Ntype != nil {
+ n.Type = typecheckNtype(n.Ntype).Type()
+ n.Ntype = nil
+ }
+ methods[i] = types.NewField(n.Pos, n.Sym, n.Type)
+ }
+
+ base.Pos = lno
+ return types.NewInterface(types.LocalPkg, methods)
+}
+
+var vargen int
+
+func Temp(t *types.Type) *ir.Name {
+ return TempAt(base.Pos, ir.CurFunc, t)
+}
+
+// TempAt makes a new temporary Node in curfn, "off the books"
+// (bypassing Declare).
+func TempAt(pos src.XPos, curfn *ir.Func, t *types.Type) *ir.Name {
+ if curfn == nil {
+ base.Fatalf("no curfn for tempAt")
+ }
+ if curfn.Op() == ir.OCLOSURE {
+ ir.Dump("tempAt", curfn)
+ base.Fatalf("adding tempAt to wrong closure function")
+ }
+ if t == nil {
+ base.Fatalf("tempAt called with nil type")
+ }
+
+ s := &types.Sym{
+ Name: autotmpname(len(curfn.Dcl)),
+ Pkg: types.LocalPkg,
+ }
+ n := ir.NewNameAt(pos, s)
+ s.Def = n
+ n.SetType(t)
+ n.Class_ = ir.PAUTO
+ n.SetEsc(ir.EscNever)
+ n.Curfn = curfn
+ n.SetUsed(true)
+ n.SetAutoTemp(true)
+ curfn.Dcl = append(curfn.Dcl, n)
+
+ types.CalcSize(t)
+
+ return n
+}
+
+// autotmpname returns the name for an autotmp variable numbered n.
+func autotmpname(n int) string {
+ // Give each tmp a different name so that they can be registerized.
+ // Add a preceding . to avoid clashing with legal names.
+ const prefix = ".autotmp_"
+ // Start with a buffer big enough to hold a large n.
+ b := []byte(prefix + " ")[:len(prefix)]
+ b = strconv.AppendInt(b, int64(n), 10)
+ return types.InternString(b)
+}
+
+// NewMethodType builds a function type from method type f, with the
+// receiver (if non-nil) prepended as the first argument.
+func NewMethodType(f *types.Type, receiver *types.Type) *types.Type {
+ inLen := f.Params().Fields().Len()
+ if receiver != nil {
+ inLen++
+ }
+ in := make([]*ir.Field, 0, inLen)
+
+ if receiver != nil {
+ d := ir.NewField(base.Pos, nil, nil, receiver)
+ in = append(in, d)
+ }
+
+ for _, t := range f.Params().Fields().Slice() {
+ d := ir.NewField(base.Pos, nil, nil, t.Type)
+ d.IsDDD = t.IsDDD()
+ in = append(in, d)
+ }
+
+ outLen := f.Results().Fields().Len()
+ out := make([]*ir.Field, 0, outLen)
+ for _, t := range f.Results().Fields().Slice() {
+ d := ir.NewField(base.Pos, nil, nil, t.Type)
+ out = append(out, d)
+ }
+
+ return NewFuncType(nil, in, out)
+}
diff --git a/src/cmd/compile/internal/typecheck/export.go b/src/cmd/compile/internal/typecheck/export.go
new file mode 100644
index 0000000000..381a28e3ed
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/export.go
@@ -0,0 +1,79 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typecheck
+
+import (
+ "go/constant"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+ "cmd/internal/src"
+)
+
+// importalias declares symbol s as an imported type alias with type t.
+// ipkg is the package being imported
+func importalias(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
+ return importobj(ipkg, pos, s, ir.OTYPE, ir.PEXTERN, t)
+}
+
+// importconst declares symbol s as an imported constant with type t and value val.
+// ipkg is the package being imported
+func importconst(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type, val constant.Value) *ir.Name {
+ n := importobj(ipkg, pos, s, ir.OLITERAL, ir.PEXTERN, t)
+ n.SetVal(val)
+ return n
+}
+
+// importfunc declares symbol s as an imported function with type t.
+// ipkg is the package being imported
+func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
+ n := importobj(ipkg, pos, s, ir.ONAME, ir.PFUNC, t)
+
+ fn := ir.NewFunc(pos)
+ fn.SetType(t)
+ n.SetFunc(fn)
+ fn.Nname = n
+
+ return n
+}
+
+// importobj declares symbol s as an imported object representable by op.
+// ipkg is the package being imported
+func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class, t *types.Type) *ir.Name {
+ n := importsym(ipkg, pos, s, op, ctxt)
+ n.SetType(t)
+ if ctxt == ir.PFUNC {
+ n.Sym().SetFunc(true)
+ }
+ return n
+}
+
+func importsym(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class) *ir.Name {
+ if n := s.PkgDef(); n != nil {
+ base.Fatalf("importsym of symbol that already exists: %v", n)
+ }
+
+ n := ir.NewDeclNameAt(pos, op, s)
+ n.Class_ = ctxt // TODO(mdempsky): Move this into NewDeclNameAt too?
+ s.SetPkgDef(n)
+ s.Importdef = ipkg
+ return n
+}
+
+// importtype returns the named type declared by symbol s.
+// If no such type has been declared yet, a forward declaration is returned.
+// ipkg is the package being imported
+func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *ir.Name {
+ n := importsym(ipkg, pos, s, ir.OTYPE, ir.PEXTERN)
+ n.SetType(types.NewNamed(n))
+ return n
+}
+
+// importvar declares symbol s as an imported variable with type t.
+// ipkg is the package being imported
+func importvar(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
+ return importobj(ipkg, pos, s, ir.ONAME, ir.PEXTERN, t)
+}
diff --git a/src/cmd/compile/internal/typecheck/func.go b/src/cmd/compile/internal/typecheck/func.go
new file mode 100644
index 0000000000..4675de6cad
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/func.go
@@ -0,0 +1,398 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typecheck
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+
+ "fmt"
+)
+
+// MakeDotArgs packs the arguments that match a ...T parameter into a []T.
+func MakeDotArgs(typ *types.Type, args []ir.Node) ir.Node {
+ var n ir.Node
+ if len(args) == 0 {
+ n = NodNil()
+ n.SetType(typ)
+ } else {
+ lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
+ lit.List.Append(args...)
+ lit.SetImplicit(true)
+ n = lit
+ }
+
+ n = Expr(n)
+ if n.Type() == nil {
+ base.Fatalf("mkdotargslice: typecheck failed")
+ }
+ return n
+}
+
+// FixVariadicCall rewrites calls to variadic functions to use an
+// explicit ... argument if one is not already present.
+func FixVariadicCall(call *ir.CallExpr) {
+ fntype := call.X.Type()
+ if !fntype.IsVariadic() || call.IsDDD {
+ return
+ }
+
+ vi := fntype.NumParams() - 1
+ vt := fntype.Params().Field(vi).Type
+
+ args := call.Args
+ extra := args[vi:]
+ slice := MakeDotArgs(vt, extra)
+ for i := range extra {
+ extra[i] = nil // allow GC
+ }
+
+ call.Args.Set(append(args[:vi], slice))
+ call.IsDDD = true
+}
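+
+// Conceptually, for a variadic callee (illustrative sketch):
+//
+// func f(xs ...int)
+//
+// f(1, 2, 3) // becomes f([]int{1, 2, 3}...)
+// f() // becomes f(nil...), via MakeDotArgs' typed nil
+//
+// so that later phases only ever see the explicit ... form.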
+
+// ClosureType returns the struct type used to hold all the information
+// needed in the closure for clo (clo must be an OCLOSURE node).
+// The address of a variable of the returned type can be cast to a func.
+func ClosureType(clo *ir.ClosureExpr) *types.Type {
+ // Create closure in the form of a composite literal.
+ // supposing the closure captures an int i and a string s
+ // and has one float64 argument and no results,
+ // the generated code looks like:
+ //
+ // clos = &struct{.F uintptr; i *int; s *string}{func.1, &i, &s}
+ //
+ // The use of the struct provides type information to the garbage
+ // collector so that it can walk the closure. We could use (in this case)
+ // [3]unsafe.Pointer instead, but that would leave the gc in the dark.
+ // The information appears in the binary in the form of type descriptors;
+ // the struct is unnamed so that closures in multiple packages with the
+ // same struct type can share the descriptor.
+ fields := []*ir.Field{
+ ir.NewField(base.Pos, Lookup(".F"), nil, types.Types[types.TUINTPTR]),
+ }
+ for _, v := range clo.Func.ClosureVars {
+ typ := v.Type()
+ if !v.Byval() {
+ typ = types.NewPtr(typ)
+ }
+ fields = append(fields, ir.NewField(base.Pos, v.Sym(), nil, typ))
+ }
+ typ := NewStructType(fields)
+ typ.SetNoalg(true)
+ return typ
+}
+
+// PartialCallType returns the struct type used to hold all the information
+// needed in the closure for n (n must be a OCALLPART node).
+// The address of a variable of the returned type can be cast to a func.
+func PartialCallType(n *ir.CallPartExpr) *types.Type {
+ t := NewStructType([]*ir.Field{
+ ir.NewField(base.Pos, Lookup("F"), nil, types.Types[types.TUINTPTR]),
+ ir.NewField(base.Pos, Lookup("R"), nil, n.X.Type()),
+ })
+ t.SetNoalg(true)
+ return t
+}
+
+// CaptureVars is called in a separate phase after all typechecking is done.
+// It decides whether each variable captured by a closure should be captured
+// by value or by reference.
+// We use value capturing for values <= 128 bytes that are never reassigned
+// after capturing (effectively constant).
+func CaptureVars(fn *ir.Func) {
+ lno := base.Pos
+ base.Pos = fn.Pos()
+ cvars := fn.ClosureVars
+ out := cvars[:0]
+ for _, v := range cvars {
+ if v.Type() == nil {
+ // If v.Type is nil, it means v looked like it
+ // was going to be used in the closure, but
+ // isn't. This happens in struct literals like
+ // s{f: x} where we can't distinguish whether
+ // f is a field identifier or expression until
+ // resolving s.
+ continue
+ }
+ out = append(out, v)
+
+ // type check the & of closed variables outside the closure,
+ // so that the outer frame also grabs them and knows they escape.
+ types.CalcSize(v.Type())
+
+ var outer ir.Node
+ outer = v.Outer
+ outermost := v.Defn.(*ir.Name)
+
+ // out parameters will be assigned to implicitly upon return.
+ if outermost.Class_ != ir.PPARAMOUT && !outermost.Name().Addrtaken() && !outermost.Name().Assigned() && v.Type().Width <= 128 {
+ v.SetByval(true)
+ } else {
+ outermost.Name().SetAddrtaken(true)
+ outer = NodAddr(outer)
+ }
+
+ if base.Flag.LowerM > 1 {
+ var name *types.Sym
+ if v.Curfn != nil && v.Curfn.Nname != nil {
+ name = v.Curfn.Sym()
+ }
+ how := "ref"
+ if v.Byval() {
+ how = "value"
+ }
+ base.WarnfAt(v.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym(), outermost.Name().Addrtaken(), outermost.Name().Assigned(), int32(v.Type().Width))
+ }
+
+ outer = Expr(outer)
+ fn.ClosureEnter.Append(outer)
+ }
+
+ fn.ClosureVars = out
+ base.Pos = lno
+}
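+
+// A user-level sketch of the capture decision above (illustrative only):
+//
+// x := 1 // <= 128 bytes, never reassigned after capture: by value
+// y := 2
+// f := func() int { return x + y }
+// y = 3 // reassigned after capture: y must be captured by reference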
+
+// typecheckclosure typechecks an OCLOSURE node. It also creates the named
+// function associated with the closure.
+// TODO: This creation of the named function should probably really be done in a
+// separate pass from type-checking.
+func typecheckclosure(clo *ir.ClosureExpr, top int) {
+ fn := clo.Func
+ // Set current associated iota value, so iota can be used inside
+ // function in ConstSpec, see issue #22344
+ if x := getIotaValue(); x >= 0 {
+ fn.Iota = x
+ }
+
+ fn.ClosureType = check(fn.ClosureType, ctxType)
+ clo.SetType(fn.ClosureType.Type())
+ fn.SetClosureCalled(top&ctxCallee != 0)
+
+ // Do not typecheck fn twice; otherwise we will end up pushing
+ // fn to Target.Decls multiple times, causing initLSym to be called twice.
+ // See #30709
+ if fn.Typecheck() == 1 {
+ return
+ }
+
+ for _, ln := range fn.ClosureVars {
+ n := ln.Defn
+ if !n.Name().Captured() {
+ n.Name().SetCaptured(true)
+ if n.Name().Decldepth == 0 {
+ base.Fatalf("typecheckclosure: var %v does not have decldepth assigned", n)
+ }
+
+ // Ignore assignments to the variable in straightline code
+ // preceding the first capturing by a closure.
+ if n.Name().Decldepth == decldepth {
+ n.Name().SetAssigned(false)
+ }
+ }
+ }
+
+ fn.Nname.SetSym(closurename(ir.CurFunc))
+ ir.MarkFunc(fn.Nname)
+ Func(fn)
+
+ // Type check the body now, but only if we're inside a function.
+ // At top level (in a variable initialization: curfn==nil) we're not
+ // ready to type check code yet; we'll check it later, because the
+ // underlying closure function we create is added to Target.Decls.
+ if ir.CurFunc != nil && clo.Type() != nil {
+ oldfn := ir.CurFunc
+ ir.CurFunc = fn
+ olddd := decldepth
+ decldepth = 1
+ Stmts(fn.Body)
+ decldepth = olddd
+ ir.CurFunc = oldfn
+ }
+
+ Target.Decls = append(Target.Decls, fn)
+}
+
+// ImportedBody lazily typechecks imported function bodies. For local
+// functions, caninl sets the typecheck flag instead, because their
+// bodies are copies of already-checked code.
+func ImportedBody(fn *ir.Func) {
+ lno := ir.SetPos(fn.Nname)
+
+ ImportBody(fn)
+
+ // ImportedBody is only for imported functions;
+ // their bodies may refer to unsafe as long as the package
+ // was marked safe during import (which was checked then).
+ // The Inl.Body of a local function has been typechecked before caninl copied it.
+ pkg := fnpkg(fn.Nname)
+
+ if pkg == types.LocalPkg || pkg == nil {
+ return // ImportedBody on local function
+ }
+
+ if base.Flag.LowerM > 2 || base.Debug.Export != 0 {
+ fmt.Printf("typecheck import [%v] %L { %v }\n", fn.Sym(), fn, ir.Nodes(fn.Inl.Body))
+ }
+
+ savefn := ir.CurFunc
+ ir.CurFunc = fn
+ Stmts(fn.Inl.Body)
+ ir.CurFunc = savefn
+
+ // During ImportBody (which imports fn.Func.Inl.Body),
+ // declarations are added to fn.Func.Dcl by StartFuncBody(). Move them
+ // to fn.Func.Inl.Dcl for consistency with how local functions
+ // behave. (Append because ImportedBody may be called multiple
+ // times.)
+ fn.Inl.Dcl = append(fn.Inl.Dcl, fn.Dcl...)
+ fn.Dcl = nil
+
+ base.Pos = lno
+}
+
+// fnpkg returns the function's package. For ordinary functions it's on
+// the sym, but for imported methods the sym can be re-used in the local
+// package, so peel it off the receiver's type.
+func fnpkg(fn *ir.Name) *types.Pkg {
+ if ir.IsMethod(fn) {
+ // method
+ rcvr := fn.Type().Recv().Type
+
+ if rcvr.IsPtr() {
+ rcvr = rcvr.Elem()
+ }
+ if rcvr.Sym() == nil {
+ base.Fatalf("receiver with no sym: [%v] %L (%v)", fn.Sym(), fn, rcvr)
+ }
+ return rcvr.Sym().Pkg
+ }
+
+ // non-method
+ return fn.Sym().Pkg
+}
+
+// CaptureVarsComplete is set to true when the capturevars phase is done.
+var CaptureVarsComplete bool
+
+// closurename generates a new unique name for a closure within
+// outerfunc.
+func closurename(outerfunc *ir.Func) *types.Sym {
+ outer := "glob."
+ prefix := "func"
+ gen := &globClosgen
+
+ if outerfunc != nil {
+ if outerfunc.OClosure != nil {
+ prefix = ""
+ }
+
+ outer = ir.FuncName(outerfunc)
+
+ // There may be multiple functions named "_". In those
+ // cases, we can't use their individual Closgens as it
+ // would lead to name clashes.
+ if !ir.IsBlank(outerfunc.Nname) {
+ gen = &outerfunc.Closgen
+ }
+ }
+
+ *gen++
+ return Lookup(fmt.Sprintf("%s.%s%d", outer, prefix, *gen))
+}
+
+// globClosgen is like Func.Closgen, but for the global scope.
+var globClosgen int32
+
+// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
+// for partial calls.
+func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.Func {
+ rcvrtype := dot.X.Type()
+ sym := ir.MethodSymSuffix(rcvrtype, meth, "-fm")
+
+ if sym.Uniq() {
+ return sym.Def.(*ir.Func)
+ }
+ sym.SetUniq(true)
+
+ savecurfn := ir.CurFunc
+ saveLineNo := base.Pos
+ ir.CurFunc = nil
+
+ // Set line number equal to the line number where the method is declared.
+ var m *types.Field
+ if lookdot0(meth, rcvrtype, &m, false) == 1 && m.Pos.IsKnown() {
+ base.Pos = m.Pos
+ }
+ // Note: !m.Pos.IsKnown() happens for method expressions where
+ // the method is implicitly declared. The Error method of the
+ // built-in error type is one such method. We leave the line
+ // number at the use of the method expression in this
+ // case. See issue 29389.
+
+ tfn := ir.NewFuncType(base.Pos, nil,
+ NewFuncParams(t0.Params(), true),
+ NewFuncParams(t0.Results(), false))
+
+ fn := DeclFunc(sym, tfn)
+ fn.SetDupok(true)
+ fn.SetNeedctxt(true)
+
+ // Declare and initialize variable holding receiver.
+ cr := ir.NewClosureRead(rcvrtype, types.Rnd(int64(types.PtrSize), int64(rcvrtype.Align)))
+ ptr := NewName(Lookup(".this"))
+ Declare(ptr, ir.PAUTO)
+ ptr.SetUsed(true)
+ var body []ir.Node
+ if rcvrtype.IsPtr() || rcvrtype.IsInterface() {
+ ptr.SetType(rcvrtype)
+ body = append(body, ir.NewAssignStmt(base.Pos, ptr, cr))
+ } else {
+ ptr.SetType(types.NewPtr(rcvrtype))
+ body = append(body, ir.NewAssignStmt(base.Pos, ptr, NodAddr(cr)))
+ }
+
+ call := ir.NewCallExpr(base.Pos, ir.OCALL, ir.NewSelectorExpr(base.Pos, ir.OXDOT, ptr, meth), nil)
+ call.Args.Set(ir.ParamNames(tfn.Type()))
+ call.IsDDD = tfn.Type().IsVariadic()
+ if t0.NumResults() != 0 {
+ ret := ir.NewReturnStmt(base.Pos, nil)
+ ret.Results = []ir.Node{call}
+ body = append(body, ret)
+ } else {
+ body = append(body, call)
+ }
+
+ fn.Body.Set(body)
+ FinishFuncBody()
+
+ Func(fn)
+ // Need to typecheck the body of the just-generated wrapper.
+ // Stmts requires that ir.CurFunc is set when processing an ORETURN.
+ ir.CurFunc = fn
+ Stmts(fn.Body)
+ sym.Def = fn
+ Target.Decls = append(Target.Decls, fn)
+ ir.CurFunc = savecurfn
+ base.Pos = saveLineNo
+
+ return fn
+}
+
+func typecheckpartialcall(n ir.Node, sym *types.Sym) *ir.CallPartExpr {
+ switch n.Op() {
+ case ir.ODOTINTER, ir.ODOTMETH:
+ break
+
+ default:
+ base.Fatalf("invalid typecheckpartialcall")
+ }
+ dot := n.(*ir.SelectorExpr)
+
+ // Create top-level function.
+ fn := makepartialcall(dot, dot.Type(), sym)
+ fn.SetWrapper(true)
+
+ return ir.NewCallPartExpr(dot.Pos(), dot.X, dot.Selection, fn)
+}
diff --git a/src/cmd/compile/internal/typecheck/iexport.go b/src/cmd/compile/internal/typecheck/iexport.go
new file mode 100644
index 0000000000..4ddee01b5a
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/iexport.go
@@ -0,0 +1,1614 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Indexed package export.
+//
+// The indexed export data format is an evolution of the previous
+// binary export data format. Its chief contribution is introducing an
+// index table, which allows efficient random access of individual
+// declarations and inline function bodies. In turn, this allows
+// avoiding unnecessary work for compilation units that import large
+// packages.
+//
+//
+// The top-level data format is structured as:
+//
+// Header struct {
+// Tag byte // 'i'
+// Version uvarint
+// StringSize uvarint
+// DataSize uvarint
+// }
+//
+// Strings [StringSize]byte
+// Data [DataSize]byte
+//
+// MainIndex []struct{
+// PkgPath stringOff
+// PkgName stringOff
+// PkgHeight uvarint
+//
+// Decls []struct{
+// Name stringOff
+// Offset declOff
+// }
+// }
+//
+// Fingerprint [8]byte
+//
+// uvarint means a uint64 written out using uvarint encoding.
+//
+// []T means a uvarint followed by that many T objects. In other
+// words:
+//
+// Len uvarint
+// Elems [Len]T
+//
+// stringOff means a uvarint that indicates an offset within the
+// Strings section. At that offset is another uvarint, followed by
+// that many bytes, which form the string value.
+//
+// declOff means a uvarint that indicates an offset within the Data
+// section where the associated declaration can be found.
+//
+//
+// There are five kinds of declarations, distinguished by their first
+// byte:
+//
+// type Var struct {
+// Tag byte // 'V'
+// Pos Pos
+// Type typeOff
+// }
+//
+// type Func struct {
+// Tag byte // 'F'
+// Pos Pos
+// Signature Signature
+// }
+//
+// type Const struct {
+// Tag byte // 'C'
+// Pos Pos
+// Value Value
+// }
+//
+// type Type struct {
+// Tag byte // 'T'
+// Pos Pos
+// Underlying typeOff
+//
+// Methods []struct{ // omitted if Underlying is an interface type
+// Pos Pos
+// Name stringOff
+// Recv Param
+// Signature Signature
+// }
+// }
+//
+// type Alias struct {
+// Tag byte // 'A'
+// Pos Pos
+// Type typeOff
+// }
+//
+//
+// typeOff means a uvarint that either indicates a predeclared type,
+// or an offset into the Data section. If the uvarint is less than
+// predeclReserved, then it indicates the index into the predeclared
+// types list (see predeclared in bexport.go for order). Otherwise,
+// subtracting predeclReserved yields the offset of a type descriptor.
+//
+// Value means a type and type-specific value. See
+// (*exportWriter).value for details.
+//
+//
+// There are nine kinds of type descriptors, distinguished by an itag:
+//
+// type DefinedType struct {
+// Tag itag // definedType
+// Name stringOff
+// PkgPath stringOff
+// }
+//
+// type PointerType struct {
+// Tag itag // pointerType
+// Elem typeOff
+// }
+//
+// type SliceType struct {
+// Tag itag // sliceType
+// Elem typeOff
+// }
+//
+// type ArrayType struct {
+// Tag itag // arrayType
+// Len uint64
+// Elem typeOff
+// }
+//
+// type ChanType struct {
+// Tag itag // chanType
+// Dir uint64 // 1 RecvOnly; 2 SendOnly; 3 SendRecv
+// Elem typeOff
+// }
+//
+// type MapType struct {
+// Tag itag // mapType
+// Key typeOff
+// Elem typeOff
+// }
+//
+// type FuncType struct {
+// Tag itag // signatureType
+// PkgPath stringOff
+// Signature Signature
+// }
+//
+// type StructType struct {
+// Tag itag // structType
+// PkgPath stringOff
+// Fields []struct {
+// Pos Pos
+// Name stringOff
+// Type typeOff
+// Embedded bool
+// Note stringOff
+// }
+// }
+//
+// type InterfaceType struct {
+// Tag itag // interfaceType
+// PkgPath stringOff
+// Embeddeds []struct {
+// Pos Pos
+// Type typeOff
+// }
+// Methods []struct {
+// Pos Pos
+// Name stringOff
+// Signature Signature
+// }
+// }
+//
+//
+// type Signature struct {
+// Params []Param
+// Results []Param
+// Variadic bool // omitted if Results is empty
+// }
+//
+// type Param struct {
+// Pos Pos
+// Name stringOff
+// Type typOff
+// }
+//
+//
+// Pos encodes a file:line:column triple, incorporating a simple delta
+// encoding scheme within a data object. See exportWriter.pos for
+// details.
+//
+//
+// Compiler-specific details.
+//
+// cmd/compile writes out a second index for inline bodies and also
+// appends additional compiler-specific details after declarations.
+// Third-party tools are not expected to depend on these details and
+// they're expected to change much more rapidly, so they're omitted
+// here. See exportWriter's varExt/funcExt/etc methods for details.
+
+package typecheck
+
+import (
+ "bufio"
+ "bytes"
+ "crypto/md5"
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "io"
+ "math/big"
+ "sort"
+ "strings"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+ "cmd/internal/goobj"
+ "cmd/internal/src"
+)
+
+// Current indexed export format version. Increase with each format change.
+// 1: added column details to Pos
+// 0: Go1.11 encoding
+const iexportVersion = 1
+
+// predeclReserved is the number of type offsets reserved for types
+// implicitly declared in the universe block.
+const predeclReserved = 32
+
+// An itag distinguishes the kind of type that was written into the
+// indexed export format.
+type itag uint64
+
+const (
+ // Types
+ definedType itag = iota
+ pointerType
+ sliceType
+ arrayType
+ chanType
+ mapType
+ signatureType
+ structType
+ interfaceType
+)
+
+func WriteExports(out *bufio.Writer) {
+ p := iexporter{
+ allPkgs: map[*types.Pkg]bool{},
+ stringIndex: map[string]uint64{},
+ declIndex: map[*types.Sym]uint64{},
+ inlineIndex: map[*types.Sym]uint64{},
+ typIndex: map[*types.Type]uint64{},
+ }
+
+ for i, pt := range predeclared() {
+ p.typIndex[pt] = uint64(i)
+ }
+ if len(p.typIndex) > predeclReserved {
+ base.Fatalf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved)
+ }
+
+ // Initialize work queue with exported declarations.
+ for _, n := range Target.Exports {
+ p.pushDecl(n)
+ }
+
+ // Loop until no more work. We use a queue because while
+ // writing out inline bodies, we may discover additional
+ // declarations that are needed.
+ for !p.declTodo.Empty() {
+ p.doDecl(p.declTodo.PopLeft())
+ }
+
+ // Append indices to data0 section.
+ dataLen := uint64(p.data0.Len())
+ w := p.newWriter()
+ w.writeIndex(p.declIndex, true)
+ w.writeIndex(p.inlineIndex, false)
+ w.flush()
+
+ if *base.Flag.LowerV {
+ fmt.Printf("export: hdr strings %v, data %v, index %v\n", p.strings.Len(), dataLen, p.data0.Len())
+ }
+
+ // Assemble header.
+ var hdr intWriter
+ hdr.WriteByte('i')
+ hdr.uint64(iexportVersion)
+ hdr.uint64(uint64(p.strings.Len()))
+ hdr.uint64(dataLen)
+
+ // Flush output.
+ h := md5.New()
+ wr := io.MultiWriter(out, h)
+ io.Copy(wr, &hdr)
+ io.Copy(wr, &p.strings)
+ io.Copy(wr, &p.data0)
+
+ // Add fingerprint (used by linker object file).
+ // Attach this to the end, so tools (e.g. gcimporter) don't care.
+ copy(base.Ctxt.Fingerprint[:], h.Sum(nil)[:])
+ out.Write(base.Ctxt.Fingerprint[:])
+}
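+
+// sketchReadHeader is a consumer-side sketch of the fixed header
+// assembled above (illustrative only; the real reader lives in
+// iimport.go). It uses this file's existing bufio/binary/fmt imports.
+func sketchReadHeader(in *bufio.Reader) (version, stringSize, dataSize uint64, err error) {
+ tag, err := in.ReadByte()
+ if err != nil {
+ return 0, 0, 0, err
+ }
+ if tag != 'i' {
+ return 0, 0, 0, fmt.Errorf("unexpected export data tag %q", tag)
+ }
+ if version, err = binary.ReadUvarint(in); err != nil {
+ return 0, 0, 0, err
+ }
+ if stringSize, err = binary.ReadUvarint(in); err != nil {
+ return 0, 0, 0, err
+ }
+ dataSize, err = binary.ReadUvarint(in)
+ return version, stringSize, dataSize, err
+}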
+
+// writeIndex writes out a symbol index. mainIndex indicates whether
+// we're writing out the main index, which is also read by
+// non-compiler tools and includes a complete package description
+// (i.e., name and height).
+func (w *exportWriter) writeIndex(index map[*types.Sym]uint64, mainIndex bool) {
+ // Build a map from packages to symbols from that package.
+ pkgSyms := map[*types.Pkg][]*types.Sym{}
+
+ // For the main index, make sure to include every package that
+ // we reference, even if we're not exporting (or reexporting)
+ // any symbols from it.
+ if mainIndex {
+ pkgSyms[types.LocalPkg] = nil
+ for pkg := range w.p.allPkgs {
+ pkgSyms[pkg] = nil
+ }
+ }
+
+ // Group symbols by package.
+ for sym := range index {
+ pkgSyms[sym.Pkg] = append(pkgSyms[sym.Pkg], sym)
+ }
+
+ // Sort packages by path.
+ var pkgs []*types.Pkg
+ for pkg := range pkgSyms {
+ pkgs = append(pkgs, pkg)
+ }
+ sort.Slice(pkgs, func(i, j int) bool {
+ return pkgs[i].Path < pkgs[j].Path
+ })
+
+ w.uint64(uint64(len(pkgs)))
+ for _, pkg := range pkgs {
+ w.string(pkg.Path)
+ if mainIndex {
+ w.string(pkg.Name)
+ w.uint64(uint64(pkg.Height))
+ }
+
+ // Sort symbols within a package by name.
+ syms := pkgSyms[pkg]
+ sort.Slice(syms, func(i, j int) bool {
+ return syms[i].Name < syms[j].Name
+ })
+
+ w.uint64(uint64(len(syms)))
+ for _, sym := range syms {
+ w.string(sym.Name)
+ w.uint64(index[sym])
+ }
+ }
+}
+
+type iexporter struct {
+ // allPkgs tracks all packages that have been referenced by
+ // the export data, so we can ensure to include them in the
+ // main index.
+ allPkgs map[*types.Pkg]bool
+
+ declTodo ir.NameQueue
+
+ strings intWriter
+ stringIndex map[string]uint64
+
+ data0 intWriter
+ declIndex map[*types.Sym]uint64
+ inlineIndex map[*types.Sym]uint64
+ typIndex map[*types.Type]uint64
+}
+
+// stringOff returns the offset of s within the string section.
+// If not already present, it's added to the end.
+func (p *iexporter) stringOff(s string) uint64 {
+ off, ok := p.stringIndex[s]
+ if !ok {
+ off = uint64(p.strings.Len())
+ p.stringIndex[s] = off
+
+ if *base.Flag.LowerV {
+ fmt.Printf("export: str %v %.40q\n", off, s)
+ }
+
+ p.strings.uint64(uint64(len(s)))
+ p.strings.WriteString(s)
+ }
+ return off
+}
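+
+// sketchStringAt is the read-side counterpart of stringOff
+// (illustrative only; the real reader lives in iimport.go): at the
+// given offset, a uvarint length is followed by that many bytes.
+func sketchStringAt(data []byte, off uint64) string {
+ n, k := binary.Uvarint(data[off:])
+ return string(data[off+uint64(k) : off+uint64(k)+n])
+}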
+
+// pushDecl adds n to the declaration work queue, if not already present.
+func (p *iexporter) pushDecl(n *ir.Name) {
+ if n.Sym() == nil || n.Sym().Def != n && n.Op() != ir.OTYPE {
+ base.Fatalf("weird Sym: %v, %v", n, n.Sym())
+ }
+
+ // Don't export predeclared declarations.
+ if n.Sym().Pkg == types.BuiltinPkg || n.Sym().Pkg == ir.Pkgs.Unsafe {
+ return
+ }
+
+ if _, ok := p.declIndex[n.Sym()]; ok {
+ return
+ }
+
+ p.declIndex[n.Sym()] = ^uint64(0) // mark n present in work queue
+ p.declTodo.PushRight(n)
+}
+
+// exportWriter handles writing out individual data section chunks.
+type exportWriter struct {
+ p *iexporter
+
+ data intWriter
+ currPkg *types.Pkg
+ prevFile string
+ prevLine int64
+ prevColumn int64
+}
+
+func (p *iexporter) doDecl(n *ir.Name) {
+ w := p.newWriter()
+ w.setPkg(n.Sym().Pkg, false)
+
+ switch n.Op() {
+ case ir.ONAME:
+ switch n.Class_ {
+ case ir.PEXTERN:
+ // Variable.
+ w.tag('V')
+ w.pos(n.Pos())
+ w.typ(n.Type())
+ w.varExt(n)
+
+ case ir.PFUNC:
+ if ir.IsMethod(n) {
+ base.Fatalf("unexpected method: %v", n)
+ }
+
+ // Function.
+ w.tag('F')
+ w.pos(n.Pos())
+ w.signature(n.Type())
+ w.funcExt(n)
+
+ default:
+ base.Fatalf("unexpected class: %v, %v", n, n.Class_)
+ }
+
+ case ir.OLITERAL:
+ // Constant.
+ // TODO(mdempsky): Do we still need this typecheck? If so, why?
+ n = Expr(n).(*ir.Name)
+ w.tag('C')
+ w.pos(n.Pos())
+ w.value(n.Type(), n.Val())
+
+ case ir.OTYPE:
+ if types.IsDotAlias(n.Sym()) {
+ // Alias.
+ w.tag('A')
+ w.pos(n.Pos())
+ w.typ(n.Type())
+ break
+ }
+
+ // Defined type.
+ w.tag('T')
+ w.pos(n.Pos())
+
+ underlying := n.Type().Underlying()
+ if underlying == types.ErrorType.Underlying() {
+ // For "type T error", use error as the
+ // underlying type instead of error's own
+ // underlying anonymous interface. This
+ // ensures consistency with how importers may
+ // declare error (e.g., go/types uses nil Pkg
+ // for predeclared objects).
+ underlying = types.ErrorType
+ }
+ w.typ(underlying)
+
+ t := n.Type()
+ if t.IsInterface() {
+ w.typeExt(t)
+ break
+ }
+
+ ms := t.Methods()
+ w.uint64(uint64(ms.Len()))
+ for _, m := range ms.Slice() {
+ w.pos(m.Pos)
+ w.selector(m.Sym)
+ w.param(m.Type.Recv())
+ w.signature(m.Type)
+ }
+
+ w.typeExt(t)
+ for _, m := range ms.Slice() {
+ w.methExt(m)
+ }
+
+ default:
+ base.Fatalf("unexpected node: %v", n)
+ }
+
+ w.finish("dcl", p.declIndex, n.Sym())
+}
+
+func (w *exportWriter) tag(tag byte) {
+ w.data.WriteByte(tag)
+}
+
+func (w *exportWriter) finish(what string, index map[*types.Sym]uint64, sym *types.Sym) {
+ off := w.flush()
+ if *base.Flag.LowerV {
+ fmt.Printf("export: %v %v %v\n", what, off, sym)
+ }
+ index[sym] = off
+}
+
+func (p *iexporter) doInline(f *ir.Name) {
+ w := p.newWriter()
+ w.setPkg(fnpkg(f), false)
+
+ w.stmtList(ir.Nodes(f.Func.Inl.Body))
+
+ w.finish("inl", p.inlineIndex, f.Sym())
+}
+
+func (w *exportWriter) pos(pos src.XPos) {
+ p := base.Ctxt.PosTable.Pos(pos)
+ file := p.Base().AbsFilename()
+ line := int64(p.RelLine())
+ column := int64(p.RelCol())
+
+ // Encode position relative to the last position: column
+ // delta, then line delta, then file name. We reserve the
+ // bottom bit of the column and line deltas to encode whether
+ // the remaining fields are present.
+ //
+ // Note: Because data objects may be read out of order (or not
+ // at all), we can only apply delta encoding within a single
+ // object. This is handled implicitly by tracking prevFile,
+ // prevLine, and prevColumn as fields of exportWriter.
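+ //
+ // For example (a hypothetical pair of positions): going from
+ // f.go:10:5 to f.go:12:3 gives deltaColumn = (3-5)<<1 = -4 and
+ // deltaLine = (12-10)<<1 = 4. The file is unchanged, so deltaLine's
+ // low bit stays 0; deltaLine is nonzero, so deltaColumn's low bit
+ // is set, yielding -3. The importer's pos method reverses these
+ // steps.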
+
+ deltaColumn := (column - w.prevColumn) << 1
+ deltaLine := (line - w.prevLine) << 1
+
+ if file != w.prevFile {
+ deltaLine |= 1
+ }
+ if deltaLine != 0 {
+ deltaColumn |= 1
+ }
+
+ w.int64(deltaColumn)
+ if deltaColumn&1 != 0 {
+ w.int64(deltaLine)
+ if deltaLine&1 != 0 {
+ w.string(file)
+ }
+ }
+
+ w.prevFile = file
+ w.prevLine = line
+ w.prevColumn = column
+}
+
+func (w *exportWriter) pkg(pkg *types.Pkg) {
+ // Ensure any referenced packages are declared in the main index.
+ w.p.allPkgs[pkg] = true
+
+ w.string(pkg.Path)
+}
+
+func (w *exportWriter) qualifiedIdent(n ir.Node) {
+ // Ensure any referenced declarations are written out too.
+ w.p.pushDecl(n.Name())
+
+ s := n.Sym()
+ w.string(s.Name)
+ w.pkg(s.Pkg)
+}
+
+func (w *exportWriter) selector(s *types.Sym) {
+ if w.currPkg == nil {
+ base.Fatalf("missing currPkg")
+ }
+
+ // Method selectors are rewritten into method symbols (of the
+ // form T.M) during typechecking, but we want to write out
+ // just the bare method name.
+ name := s.Name
+ if i := strings.LastIndex(name, "."); i >= 0 {
+ name = name[i+1:]
+ } else {
+ pkg := w.currPkg
+ if types.IsExported(name) {
+ pkg = types.LocalPkg
+ }
+ if s.Pkg != pkg {
+ base.Fatalf("package mismatch in selector: %v in package %q, but want %q", s, s.Pkg.Path, pkg.Path)
+ }
+ }
+
+ w.string(name)
+}
+
+func (w *exportWriter) typ(t *types.Type) {
+ w.data.uint64(w.p.typOff(t))
+}
+
+func (p *iexporter) newWriter() *exportWriter {
+ return &exportWriter{p: p}
+}
+
+func (w *exportWriter) flush() uint64 {
+ off := uint64(w.p.data0.Len())
+ io.Copy(&w.p.data0, &w.data)
+ return off
+}
+
+func (p *iexporter) typOff(t *types.Type) uint64 {
+ off, ok := p.typIndex[t]
+ if !ok {
+ w := p.newWriter()
+ w.doTyp(t)
+ rawOff := w.flush()
+ if *base.Flag.LowerV {
+ fmt.Printf("export: typ %v %v\n", rawOff, t)
+ }
+ off = predeclReserved + rawOff
+ p.typIndex[t] = off
+ }
+ return off
+}
+
+func (w *exportWriter) startType(k itag) {
+ w.data.uint64(uint64(k))
+}
+
+func (w *exportWriter) doTyp(t *types.Type) {
+ if t.Sym() != nil {
+ if t.Sym().Pkg == types.BuiltinPkg || t.Sym().Pkg == ir.Pkgs.Unsafe {
+ base.Fatalf("builtin type missing from typIndex: %v", t)
+ }
+
+ w.startType(definedType)
+ w.qualifiedIdent(t.Obj().(*ir.Name))
+ return
+ }
+
+ switch t.Kind() {
+ case types.TPTR:
+ w.startType(pointerType)
+ w.typ(t.Elem())
+
+ case types.TSLICE:
+ w.startType(sliceType)
+ w.typ(t.Elem())
+
+ case types.TARRAY:
+ w.startType(arrayType)
+ w.uint64(uint64(t.NumElem()))
+ w.typ(t.Elem())
+
+ case types.TCHAN:
+ w.startType(chanType)
+ w.uint64(uint64(t.ChanDir()))
+ w.typ(t.Elem())
+
+ case types.TMAP:
+ w.startType(mapType)
+ w.typ(t.Key())
+ w.typ(t.Elem())
+
+ case types.TFUNC:
+ w.startType(signatureType)
+ w.setPkg(t.Pkg(), true)
+ w.signature(t)
+
+ case types.TSTRUCT:
+ w.startType(structType)
+ w.setPkg(t.Pkg(), true)
+
+ w.uint64(uint64(t.NumFields()))
+ for _, f := range t.FieldSlice() {
+ w.pos(f.Pos)
+ w.selector(f.Sym)
+ w.typ(f.Type)
+ w.bool(f.Embedded != 0)
+ w.string(f.Note)
+ }
+
+ case types.TINTER:
+ var embeddeds, methods []*types.Field
+ for _, m := range t.Methods().Slice() {
+ if m.Sym != nil {
+ methods = append(methods, m)
+ } else {
+ embeddeds = append(embeddeds, m)
+ }
+ }
+
+ w.startType(interfaceType)
+ w.setPkg(t.Pkg(), true)
+
+ w.uint64(uint64(len(embeddeds)))
+ for _, f := range embeddeds {
+ w.pos(f.Pos)
+ w.typ(f.Type)
+ }
+
+ w.uint64(uint64(len(methods)))
+ for _, f := range methods {
+ w.pos(f.Pos)
+ w.selector(f.Sym)
+ w.signature(f.Type)
+ }
+
+ default:
+ base.Fatalf("unexpected type: %v", t)
+ }
+}
+
+func (w *exportWriter) setPkg(pkg *types.Pkg, write bool) {
+ if pkg == types.NoPkg {
+ base.Fatalf("missing pkg")
+ }
+
+ if write {
+ w.pkg(pkg)
+ }
+
+ w.currPkg = pkg
+}
+
+func (w *exportWriter) signature(t *types.Type) {
+ w.paramList(t.Params().FieldSlice())
+ w.paramList(t.Results().FieldSlice())
+ if n := t.Params().NumFields(); n > 0 {
+ w.bool(t.Params().Field(n - 1).IsDDD())
+ }
+}
+
+func (w *exportWriter) paramList(fs []*types.Field) {
+ w.uint64(uint64(len(fs)))
+ for _, f := range fs {
+ w.param(f)
+ }
+}
+
+func (w *exportWriter) param(f *types.Field) {
+ w.pos(f.Pos)
+ w.localIdent(types.OrigSym(f.Sym), 0)
+ w.typ(f.Type)
+}
+
+func constTypeOf(typ *types.Type) constant.Kind {
+ switch typ {
+ case types.UntypedInt, types.UntypedRune:
+ return constant.Int
+ case types.UntypedFloat:
+ return constant.Float
+ case types.UntypedComplex:
+ return constant.Complex
+ }
+
+ switch typ.Kind() {
+ case types.TBOOL:
+ return constant.Bool
+ case types.TSTRING:
+ return constant.String
+ case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64,
+ types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR:
+ return constant.Int
+ case types.TFLOAT32, types.TFLOAT64:
+ return constant.Float
+ case types.TCOMPLEX64, types.TCOMPLEX128:
+ return constant.Complex
+ }
+
+ base.Fatalf("unexpected constant type: %v", typ)
+ return 0
+}
+
+func (w *exportWriter) value(typ *types.Type, v constant.Value) {
+ ir.AssertValidTypeForConst(typ, v)
+ w.typ(typ)
+
+ // Each type has only one admissible constant representation,
+ // so we could type switch directly on v here. However,
+ // switching on the type increases symmetry with import logic
+ // and provides a useful consistency check.
+
+ switch constTypeOf(typ) {
+ case constant.Bool:
+ w.bool(constant.BoolVal(v))
+ case constant.String:
+ w.string(constant.StringVal(v))
+ case constant.Int:
+ w.mpint(v, typ)
+ case constant.Float:
+ w.mpfloat(v, typ)
+ case constant.Complex:
+ w.mpfloat(constant.Real(v), typ)
+ w.mpfloat(constant.Imag(v), typ)
+ }
+}
+
+func intSize(typ *types.Type) (signed bool, maxBytes uint) {
+ if typ.IsUntyped() {
+ return true, ir.ConstPrec / 8
+ }
+
+ switch typ.Kind() {
+ case types.TFLOAT32, types.TCOMPLEX64:
+ return true, 3
+ case types.TFLOAT64, types.TCOMPLEX128:
+ return true, 7
+ }
+
+ signed = typ.IsSigned()
+ maxBytes = uint(typ.Size())
+
+ // The go/types API doesn't expose sizes to importers, so they
+ // don't know how big these types are.
+ switch typ.Kind() {
+ case types.TINT, types.TUINT, types.TUINTPTR:
+ maxBytes = 8
+ }
+
+ return
+}
+
+// mpint exports a multi-precision integer.
+//
+// For unsigned types, small values are written out as a single
+// byte. Larger values are written out as a length-prefixed big-endian
+// byte string, where the length prefix is encoded as its complement.
+// For example, bytes 0, 1, and 2 directly represent the integer
+// values 0, 1, and 2, while bytes 255, 254, and 253 indicate that a
+// 1-, 2-, or 3-byte big-endian string follows.
+//
+// The encoding for signed types uses the same general approach as for
+// unsigned types, except small values use zig-zag encoding and the
+// bottom bit of the length prefix byte for large values is reserved
+// as a sign bit.
+//
+// The exact boundary between the small and large encodings varies
+// according to the maximum number of bytes needed to encode a value
+// of type typ. As a special case, 8-bit types are always encoded as
+// a single byte.
+//
+// TODO(mdempsky): Is this level of complexity really worthwhile?
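+//
+// A worked example (illustrative, not an additional format rule): for
+// uint16, maxBytes is 2 and maxSmall is 256-2 = 254, so 0..253 are
+// written as themselves, while 1000 (0x03E8) is written as the length
+// byte 254 (indicating a 2-byte string) followed by 0x03 0xE8. For
+// int16, maxSmall is 256-2*2 = 252 and -3 zig-zags to 2*3-1 = 5, a
+// single byte.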
+func (w *exportWriter) mpint(x constant.Value, typ *types.Type) {
+ signed, maxBytes := intSize(typ)
+
+ negative := constant.Sign(x) < 0
+ if !signed && negative {
+ base.Fatalf("negative unsigned integer; type %v, value %v", typ, x)
+ }
+
+ b := constant.Bytes(x) // little endian
+ for i, j := 0, len(b)-1; i < j; i, j = i+1, j-1 {
+ b[i], b[j] = b[j], b[i]
+ }
+
+ if len(b) > 0 && b[0] == 0 {
+ base.Fatalf("leading zeros")
+ }
+ if uint(len(b)) > maxBytes {
+ base.Fatalf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x)
+ }
+
+ maxSmall := 256 - maxBytes
+ if signed {
+ maxSmall = 256 - 2*maxBytes
+ }
+ if maxBytes == 1 {
+ maxSmall = 256
+ }
+
+ // Check if x can use small value encoding.
+ if len(b) <= 1 {
+ var ux uint
+ if len(b) == 1 {
+ ux = uint(b[0])
+ }
+ if signed {
+ ux <<= 1
+ if negative {
+ ux--
+ }
+ }
+ if ux < maxSmall {
+ w.data.WriteByte(byte(ux))
+ return
+ }
+ }
+
+ n := 256 - uint(len(b))
+ if signed {
+ n = 256 - 2*uint(len(b))
+ if negative {
+ n |= 1
+ }
+ }
+ if n < maxSmall || n >= 256 {
+ base.Fatalf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n)
+ }
+
+ w.data.WriteByte(byte(n))
+ w.data.Write(b)
+}
+
+// mpfloat exports a multi-precision floating point number.
+//
+// The number's value is decomposed into mantissa × 2**exponent, where
+// mantissa is an integer. The value is written out as mantissa (as a
+// multi-precision integer) and then the exponent, except exponent is
+// omitted if mantissa is zero.
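+//
+// For example (illustrative): 6.25 is 0.78125 × 2**3; scaling the
+// mantissa to an integer by its minimum precision of 5 bits gives
+// 6.25 = 25 × 2**-2, so 25 is written as an mpint followed by the
+// exponent -2.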
+func (w *exportWriter) mpfloat(v constant.Value, typ *types.Type) {
+ f := ir.BigFloat(v)
+ if f.IsInf() {
+ base.Fatalf("infinite constant")
+ }
+
+ // Break into f = mant × 2**exp, with 0.5 <= mant < 1.
+ var mant big.Float
+ exp := int64(f.MantExp(&mant))
+
+ // Scale so that mant is an integer.
+ prec := mant.MinPrec()
+ mant.SetMantExp(&mant, int(prec))
+ exp -= int64(prec)
+
+ manti, acc := mant.Int(nil)
+ if acc != big.Exact {
+ base.Fatalf("mantissa scaling failed for %f (%s)", f, acc)
+ }
+ w.mpint(makeInt(manti), typ)
+ if manti.Sign() != 0 {
+ w.int64(exp)
+ }
+}
+
+func (w *exportWriter) bool(b bool) bool {
+ var x uint64
+ if b {
+ x = 1
+ }
+ w.uint64(x)
+ return b
+}
+
+func (w *exportWriter) int64(x int64) { w.data.int64(x) }
+func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) }
+func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) }
+
+// Compiler-specific extensions.
+
+func (w *exportWriter) varExt(n ir.Node) {
+ w.linkname(n.Sym())
+ w.symIdx(n.Sym())
+}
+
+func (w *exportWriter) funcExt(n *ir.Name) {
+ w.linkname(n.Sym())
+ w.symIdx(n.Sym())
+
+ // Escape analysis.
+ for _, fs := range &types.RecvsParams {
+ for _, f := range fs(n.Type()).FieldSlice() {
+ w.string(f.Note)
+ }
+ }
+
+ // Inline body.
+ if n.Func.Inl != nil {
+ w.uint64(1 + uint64(n.Func.Inl.Cost))
+ if n.Func.ExportInline() {
+ w.p.doInline(n)
+ }
+
+ // Endlineno for inlined function.
+ w.pos(n.Func.Endlineno)
+ } else {
+ w.uint64(0)
+ }
+}
+
+func (w *exportWriter) methExt(m *types.Field) {
+ w.bool(m.Nointerface())
+ w.funcExt(m.Nname.(*ir.Name))
+}
+
+func (w *exportWriter) linkname(s *types.Sym) {
+ w.string(s.Linkname)
+}
+
+func (w *exportWriter) symIdx(s *types.Sym) {
+ lsym := s.Linksym()
+ if lsym.PkgIdx > goobj.PkgIdxSelf || (lsym.PkgIdx == goobj.PkgIdxInvalid && !lsym.Indexed()) || s.Linkname != "" {
+ // Don't export index for non-package symbols, linkname'd symbols,
+ // and symbols without an index. They can only be referenced by
+ // name.
+ w.int64(-1)
+ } else {
+ // For a defined symbol, export its index.
+ // For re-exporting an imported symbol, pass its index through.
+ w.int64(int64(lsym.SymIdx))
+ }
+}
+
+func (w *exportWriter) typeExt(t *types.Type) {
+ // Export whether this type is marked notinheap.
+ w.bool(t.NotInHeap())
+ // For type T, export the index of type descriptor symbols of T and *T.
+ if i, ok := typeSymIdx[t]; ok {
+ w.int64(i[0])
+ w.int64(i[1])
+ return
+ }
+ w.symIdx(types.TypeSym(t))
+ w.symIdx(types.TypeSym(t.PtrTo()))
+}
+
+// Inline bodies.
+
+func (w *exportWriter) stmtList(list ir.Nodes) {
+ for _, n := range list {
+ w.node(n)
+ }
+ w.op(ir.OEND)
+}
+
+func (w *exportWriter) node(n ir.Node) {
+ if ir.OpPrec[n.Op()] < 0 {
+ w.stmt(n)
+ } else {
+ w.expr(n)
+ }
+}
+
+// Caution: stmt will emit more than one node for a statement n that
+// has a non-empty init list n.Init() and cannot hold one naturally,
+// unlike "if" or "for", which have init sections of their own.
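+//
+// For example (illustrative): an OAS assignment whose Init holds an
+// ODCL is written as two consecutive nodes, the ODCL and then the
+// OAS; the importer reads them back as consecutive statements.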
+func (w *exportWriter) stmt(n ir.Node) {
+ if len(n.Init()) > 0 && !ir.StmtWithInit(n.Op()) {
+ // can't use stmtList here since we don't want the final OEND
+ for _, n := range n.Init() {
+ w.stmt(n)
+ }
+ }
+
+ switch n.Op() {
+ case ir.OBLOCK:
+ // No OBLOCK in export data.
+ // Inline content into this statement list,
+ // like the init list above.
+ // (At the moment neither the parser nor the typechecker
+ // generate OBLOCK nodes except to denote an empty
+ // function body, although that may change.)
+ n := n.(*ir.BlockStmt)
+ for _, n := range n.List {
+ w.stmt(n)
+ }
+
+ case ir.ODCL:
+ n := n.(*ir.Decl)
+ w.op(ir.ODCL)
+ w.pos(n.X.Pos())
+ w.localName(n.X.(*ir.Name))
+ w.typ(n.X.Type())
+
+ case ir.OAS:
+ // Don't export "v = <N>" initializing statements; we hope they're
+ // always preceded by the DCL, which will be re-parsed and
+ // typechecked to reproduce the "v = <N>" again.
+ n := n.(*ir.AssignStmt)
+ if n.Y != nil {
+ w.op(ir.OAS)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.expr(n.Y)
+ }
+
+ case ir.OASOP:
+ n := n.(*ir.AssignOpStmt)
+ w.op(ir.OASOP)
+ w.pos(n.Pos())
+ w.op(n.AsOp)
+ w.expr(n.X)
+ if w.bool(!n.IncDec) {
+ w.expr(n.Y)
+ }
+
+ case ir.OAS2, ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
+ n := n.(*ir.AssignListStmt)
+ w.op(ir.OAS2)
+ w.pos(n.Pos())
+ w.exprList(n.Lhs)
+ w.exprList(n.Rhs)
+
+ case ir.ORETURN:
+ n := n.(*ir.ReturnStmt)
+ w.op(ir.ORETURN)
+ w.pos(n.Pos())
+ w.exprList(n.Results)
+
+ // case ORETJMP:
+ // unreachable - generated by compiler for trampoline routines
+
+ case ir.OGO, ir.ODEFER:
+ n := n.(*ir.GoDeferStmt)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.Call)
+
+ case ir.OIF:
+ n := n.(*ir.IfStmt)
+ w.op(ir.OIF)
+ w.pos(n.Pos())
+ w.stmtList(n.Init())
+ w.expr(n.Cond)
+ w.stmtList(n.Body)
+ w.stmtList(n.Else)
+
+ case ir.OFOR:
+ n := n.(*ir.ForStmt)
+ w.op(ir.OFOR)
+ w.pos(n.Pos())
+ w.stmtList(n.Init())
+ w.exprsOrNil(n.Cond, n.Post)
+ w.stmtList(n.Body)
+
+ case ir.ORANGE:
+ n := n.(*ir.RangeStmt)
+ w.op(ir.ORANGE)
+ w.pos(n.Pos())
+ w.stmtList(n.Vars)
+ w.expr(n.X)
+ w.stmtList(n.Body)
+
+ case ir.OSELECT:
+ n := n.(*ir.SelectStmt)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.stmtList(n.Init())
+ w.exprsOrNil(nil, nil) // TODO(rsc): Delete (and fix importer).
+ w.caseList(n)
+
+ case ir.OSWITCH:
+ n := n.(*ir.SwitchStmt)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.stmtList(n.Init())
+ w.exprsOrNil(n.Tag, nil)
+ w.caseList(n)
+
+ // case OCASE:
+ // handled by caseList
+
+ case ir.OFALL:
+ n := n.(*ir.BranchStmt)
+ w.op(ir.OFALL)
+ w.pos(n.Pos())
+
+ case ir.OBREAK, ir.OCONTINUE, ir.OGOTO, ir.OLABEL:
+ w.op(n.Op())
+ w.pos(n.Pos())
+ label := ""
+ if sym := n.Sym(); sym != nil {
+ label = sym.Name
+ }
+ w.string(label)
+
+ default:
+ base.Fatalf("exporter: CANNOT EXPORT: %v\nPlease notify gri@\n", n.Op())
+ }
+}
+
+func isNamedTypeSwitch(n ir.Node) bool {
+ if n.Op() != ir.OSWITCH {
+ return false
+ }
+ sw := n.(*ir.SwitchStmt)
+ if sw.Tag == nil || sw.Tag.Op() != ir.OTYPESW {
+ return false
+ }
+ guard := sw.Tag.(*ir.TypeSwitchGuard)
+ return guard.Tag != nil
+}
+
+func (w *exportWriter) caseList(sw ir.Node) {
+ namedTypeSwitch := isNamedTypeSwitch(sw)
+
+ var cases []ir.Node
+ if sw.Op() == ir.OSWITCH {
+ cases = sw.(*ir.SwitchStmt).Cases
+ } else {
+ cases = sw.(*ir.SelectStmt).Cases
+ }
+ w.uint64(uint64(len(cases)))
+ for _, cas := range cases {
+ cas := cas.(*ir.CaseStmt)
+ w.pos(cas.Pos())
+ w.stmtList(cas.List)
+ if namedTypeSwitch {
+ w.localName(cas.Vars[0].(*ir.Name))
+ }
+ w.stmtList(cas.Body)
+ }
+}
+
+func (w *exportWriter) exprList(list ir.Nodes) {
+ for _, n := range list {
+ w.expr(n)
+ }
+ w.op(ir.OEND)
+}
+
+func simplifyForExport(n ir.Node) ir.Node {
+ switch n.Op() {
+ case ir.OPAREN:
+ n := n.(*ir.ParenExpr)
+ return simplifyForExport(n.X)
+ case ir.ODEREF:
+ n := n.(*ir.StarExpr)
+ if n.Implicit() {
+ return simplifyForExport(n.X)
+ }
+ case ir.OADDR:
+ n := n.(*ir.AddrExpr)
+ if n.Implicit() {
+ return simplifyForExport(n.X)
+ }
+ case ir.ODOT, ir.ODOTPTR:
+ n := n.(*ir.SelectorExpr)
+ if n.Implicit() {
+ return simplifyForExport(n.X)
+ }
+ }
+ return n
+}
+
+func (w *exportWriter) expr(n ir.Node) {
+ n = simplifyForExport(n)
+ switch n.Op() {
+ // expressions
+ // (somewhat closely following the structure of exprfmt in fmt.go)
+ case ir.ONIL:
+ n := n.(*ir.NilExpr)
+ if !n.Type().HasNil() {
+ base.Fatalf("unexpected type for nil: %v", n.Type())
+ }
+ w.op(ir.ONIL)
+ w.pos(n.Pos())
+ w.typ(n.Type())
+
+ case ir.OLITERAL:
+ w.op(ir.OLITERAL)
+ w.pos(n.Pos())
+ w.value(n.Type(), n.Val())
+
+ case ir.OMETHEXPR:
+ // Special case: explicit name of func (*T) method(...) is turned into pkg.(*T).method,
+ // but for export, this should be rendered as (*pkg.T).method.
+ // These nodes have the special property that they are names with a left OTYPE and a right ONAME.
+ n := n.(*ir.MethodExpr)
+ w.op(ir.OXDOT)
+ w.pos(n.Pos())
+ w.op(ir.OTYPE)
+ w.typ(n.T) // n.Left.Op == OTYPE
+ w.selector(n.Method.Sym)
+
+ case ir.ONAME:
+ // Package scope name.
+ n := n.(*ir.Name)
+ if (n.Class_ == ir.PEXTERN || n.Class_ == ir.PFUNC) && !ir.IsBlank(n) {
+ w.op(ir.ONONAME)
+ w.qualifiedIdent(n)
+ break
+ }
+
+ // Function scope name.
+ w.op(ir.ONAME)
+ w.localName(n)
+
+ // case OPACK, ONONAME:
+ // should have been resolved by typechecking - handled by default case
+
+ case ir.OTYPE:
+ w.op(ir.OTYPE)
+ w.typ(n.Type())
+
+ case ir.OTYPESW:
+ n := n.(*ir.TypeSwitchGuard)
+ w.op(ir.OTYPESW)
+ w.pos(n.Pos())
+ var s *types.Sym
+ if n.Tag != nil {
+ if n.Tag.Op() != ir.ONONAME {
+ base.Fatalf("expected ONONAME, got %v", n.Tag)
+ }
+ s = n.Tag.Sym()
+ }
+ w.localIdent(s, 0) // declared pseudo-variable, if any
+ w.exprsOrNil(n.X, nil)
+
+ // case OTARRAY, OTMAP, OTCHAN, OTSTRUCT, OTINTER, OTFUNC:
+ // should have been resolved by typechecking - handled by default case
+
+ // case OCLOSURE:
+ // unimplemented - handled by default case
+
+ // case OCOMPLIT:
+ // should have been resolved by typechecking - handled by default case
+
+ case ir.OPTRLIT:
+ n := n.(*ir.AddrExpr)
+ w.op(ir.OADDR)
+ w.pos(n.Pos())
+ w.expr(n.X)
+
+ case ir.OSTRUCTLIT:
+ n := n.(*ir.CompLitExpr)
+ w.op(ir.OSTRUCTLIT)
+ w.pos(n.Pos())
+ w.typ(n.Type())
+ w.fieldList(n.List) // special handling of field names
+
+ case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
+ n := n.(*ir.CompLitExpr)
+ w.op(ir.OCOMPLIT)
+ w.pos(n.Pos())
+ w.typ(n.Type())
+ w.exprList(n.List)
+
+ case ir.OKEY:
+ n := n.(*ir.KeyExpr)
+ w.op(ir.OKEY)
+ w.pos(n.Pos())
+ w.exprsOrNil(n.Key, n.Value)
+
+ // case OSTRUCTKEY:
+ // unreachable - handled in case OSTRUCTLIT by elemList
+
+ case ir.OCALLPART:
+ // An OCALLPART is an OXDOT before type checking.
+ n := n.(*ir.CallPartExpr)
+ w.op(ir.OXDOT)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.selector(n.Method.Sym)
+
+ case ir.OXDOT, ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH:
+ n := n.(*ir.SelectorExpr)
+ w.op(ir.OXDOT)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.selector(n.Sel)
+
+ case ir.ODOTTYPE, ir.ODOTTYPE2:
+ n := n.(*ir.TypeAssertExpr)
+ w.op(ir.ODOTTYPE)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.typ(n.Type())
+
+ case ir.OINDEX, ir.OINDEXMAP:
+ n := n.(*ir.IndexExpr)
+ w.op(ir.OINDEX)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.expr(n.Index)
+
+ case ir.OSLICE, ir.OSLICESTR, ir.OSLICEARR:
+ n := n.(*ir.SliceExpr)
+ w.op(ir.OSLICE)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ low, high, _ := n.SliceBounds()
+ w.exprsOrNil(low, high)
+
+ case ir.OSLICE3, ir.OSLICE3ARR:
+ n := n.(*ir.SliceExpr)
+ w.op(ir.OSLICE3)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ low, high, max := n.SliceBounds()
+ w.exprsOrNil(low, high)
+ w.expr(max)
+
+ case ir.OCOPY, ir.OCOMPLEX:
+ // treated like other builtin calls (see e.g., OREAL)
+ n := n.(*ir.BinaryExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.expr(n.Y)
+ w.op(ir.OEND)
+
+ case ir.OCONV, ir.OCONVIFACE, ir.OCONVNOP, ir.OBYTES2STR, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2RUNES, ir.ORUNESTR:
+ n := n.(*ir.ConvExpr)
+ w.op(ir.OCONV)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.typ(n.Type())
+
+ case ir.OREAL, ir.OIMAG, ir.OCAP, ir.OCLOSE, ir.OLEN, ir.ONEW, ir.OPANIC:
+ n := n.(*ir.UnaryExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.op(ir.OEND)
+
+ case ir.OAPPEND, ir.ODELETE, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
+ n := n.(*ir.CallExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.exprList(n.Args) // emits terminating OEND
+ // only append() calls may contain '...' arguments
+ if n.Op() == ir.OAPPEND {
+ w.bool(n.IsDDD)
+ } else if n.IsDDD {
+ base.Fatalf("exporter: unexpected '...' with %v call", n.Op())
+ }
+
+ case ir.OCALL, ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER, ir.OGETG:
+ n := n.(*ir.CallExpr)
+ w.op(ir.OCALL)
+ w.pos(n.Pos())
+ w.stmtList(n.Init())
+ w.expr(n.X)
+ w.exprList(n.Args)
+ w.bool(n.IsDDD)
+
+ case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
+ n := n.(*ir.MakeExpr)
+ w.op(n.Op()) // must keep separate from OMAKE for importer
+ w.pos(n.Pos())
+ w.typ(n.Type())
+ switch {
+ default:
+ // empty list
+ w.op(ir.OEND)
+ case n.Cap != nil:
+ w.expr(n.Len)
+ w.expr(n.Cap)
+ w.op(ir.OEND)
+ case n.Len != nil && (n.Op() == ir.OMAKESLICE || !n.Len.Type().IsUntyped()):
+ w.expr(n.Len)
+ w.op(ir.OEND)
+ }
+
+ // unary expressions
+ case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.ORECV:
+ n := n.(*ir.UnaryExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+
+ case ir.OADDR:
+ n := n.(*ir.AddrExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+
+ case ir.ODEREF:
+ n := n.(*ir.StarExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+
+ case ir.OSEND:
+ n := n.(*ir.SendStmt)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.Chan)
+ w.expr(n.Value)
+
+ // binary expressions
+ case ir.OADD, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT,
+ ir.OLSH, ir.OMOD, ir.OMUL, ir.ONE, ir.OOR, ir.ORSH, ir.OSUB, ir.OXOR:
+ n := n.(*ir.BinaryExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.expr(n.Y)
+
+ case ir.OANDAND, ir.OOROR:
+ n := n.(*ir.LogicalExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.expr(n.Y)
+
+ case ir.OADDSTR:
+ n := n.(*ir.AddStringExpr)
+ w.op(ir.OADDSTR)
+ w.pos(n.Pos())
+ w.exprList(n.List)
+
+ case ir.ODCLCONST:
+ // if exporting, DCLCONST should just be removed as its usage
+ // has already been replaced with literals
+
+ default:
+ base.Fatalf("cannot export %v (%d) node\n"+
+ "\t==> please file an issue and assign to gri@", n.Op(), int(n.Op()))
+ }
+}
+
+func (w *exportWriter) op(op ir.Op) {
+ w.uint64(uint64(op))
+}
+
+func (w *exportWriter) exprsOrNil(a, b ir.Node) {
+ ab := 0
+ if a != nil {
+ ab |= 1
+ }
+ if b != nil {
+ ab |= 2
+ }
+ w.uint64(uint64(ab))
+ if ab&1 != 0 {
+ w.expr(a)
+ }
+ if ab&2 != 0 {
+ w.node(b)
+ }
+}
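+
+// For example (illustrative): the OFOR case above calls
+// exprsOrNil(n.Cond, n.Post); with a nil Cond and non-nil Post it
+// writes ab == 2, so the importer's exprsOrNil reads nothing for a
+// and reads b back via node().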
+
+func (w *exportWriter) fieldList(list ir.Nodes) {
+ w.uint64(uint64(len(list)))
+ for _, n := range list {
+ n := n.(*ir.StructKeyExpr)
+ w.selector(n.Field)
+ w.expr(n.Value)
+ }
+}
+
+func (w *exportWriter) localName(n *ir.Name) {
+ // Escape analysis happens after inline bodies are saved, but
+ // we're using the same ONAME nodes, so we might still see
+ // PAUTOHEAP here.
+ //
+ // Check for Stackcopy to identify PAUTOHEAP that came from
+ // PPARAM/PPARAMOUT, because we only want to include vargen in
+ // non-param names.
+ var v int32
+ if n.Class_ == ir.PAUTO || (n.Class_ == ir.PAUTOHEAP && n.Name().Stackcopy == nil) {
+ v = n.Name().Vargen
+ }
+
+ w.localIdent(n.Sym(), v)
+}
+
+func (w *exportWriter) localIdent(s *types.Sym, v int32) {
+ // Anonymous parameters.
+ if s == nil {
+ w.string("")
+ return
+ }
+
+ name := s.Name
+ if name == "_" {
+ w.string("_")
+ return
+ }
+
+ // TODO(mdempsky): Fix autotmp hack.
+ if i := strings.LastIndex(name, "."); i >= 0 && !strings.HasPrefix(name, ".autotmp_") {
+ base.Fatalf("unexpected dot in identifier: %v", name)
+ }
+
+ if v > 0 {
+ if strings.Contains(name, "·") {
+ base.Fatalf("exporter: unexpected · in symbol name")
+ }
+ name = fmt.Sprintf("%s·%d", name, v)
+ }
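+
+ // For example (illustrative): a local variable x with v == 2 is
+ // written as "x·2", keeping same-named locals distinct after
+ // import.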
+
+ if !types.IsExported(name) && s.Pkg != w.currPkg {
+ base.Fatalf("weird package in name: %v => %v, not %q", s, name, w.currPkg.Path)
+ }
+
+ w.string(name)
+}
+
+type intWriter struct {
+ bytes.Buffer
+}
+
+func (w *intWriter) int64(x int64) {
+ var buf [binary.MaxVarintLen64]byte
+ n := binary.PutVarint(buf[:], x)
+ w.Write(buf[:n])
+}
+
+func (w *intWriter) uint64(x uint64) {
+ var buf [binary.MaxVarintLen64]byte
+ n := binary.PutUvarint(buf[:], x)
+ w.Write(buf[:n])
+}
diff --git a/src/cmd/compile/internal/typecheck/iimport.go b/src/cmd/compile/internal/typecheck/iimport.go
new file mode 100644
index 0000000000..ab43d4f71b
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/iimport.go
@@ -0,0 +1,1142 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Indexed package import.
+// See iexport.go for the export data format.
+
+package typecheck
+
+import (
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "io"
+ "math/big"
+ "os"
+ "strings"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+ "cmd/internal/bio"
+ "cmd/internal/goobj"
+ "cmd/internal/obj"
+ "cmd/internal/src"
+)
+
+// An iimporterAndOffset identifies an importer and an offset within
+// its data section.
+type iimporterAndOffset struct {
+ p *iimporter
+ off uint64
+}
+
+var (
+ // DeclImporter maps from imported identifiers to an importer
+ // and offset where that identifier's declaration can be read.
+ DeclImporter = map[*types.Sym]iimporterAndOffset{}
+
+ // inlineImporter is like DeclImporter, but for inline bodies
+ // of function and method symbols.
+ inlineImporter = map[*types.Sym]iimporterAndOffset{}
+)
+
+func expandDecl(n ir.Node) ir.Node {
+ if n, ok := n.(*ir.Name); ok {
+ return n
+ }
+
+ id := n.(*ir.Ident)
+ if n := id.Sym().PkgDef(); n != nil {
+ return n.(*ir.Name)
+ }
+
+ r := importReaderFor(id.Sym(), DeclImporter)
+ if r == nil {
+ // Can happen if user tries to reference an undeclared name.
+ return n
+ }
+
+ return r.doDecl(n.Sym())
+}
+
+func ImportBody(fn *ir.Func) {
+ if fn.Inl.Body != nil {
+ return
+ }
+
+ r := importReaderFor(fn.Nname.Sym(), inlineImporter)
+ if r == nil {
+ base.Fatalf("missing import reader for %v", fn)
+ }
+
+ r.doInline(fn)
+}
+
+func importReaderFor(sym *types.Sym, importers map[*types.Sym]iimporterAndOffset) *importReader {
+ x, ok := importers[sym]
+ if !ok {
+ return nil
+ }
+
+ return x.p.newReader(x.off, sym.Pkg)
+}
+
+type intReader struct {
+ *bio.Reader
+ pkg *types.Pkg
+}
+
+func (r *intReader) int64() int64 {
+ i, err := binary.ReadVarint(r.Reader)
+ if err != nil {
+ base.Errorf("import %q: read error: %v", r.pkg.Path, err)
+ base.ErrorExit()
+ }
+ return i
+}
+
+func (r *intReader) uint64() uint64 {
+ i, err := binary.ReadUvarint(r.Reader)
+ if err != nil {
+ base.Errorf("import %q: read error: %v", r.pkg.Path, err)
+ base.ErrorExit()
+ }
+ return i
+}
+
+func ReadImports(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType) {
+ ird := &intReader{in, pkg}
+
+ version := ird.uint64()
+ if version != iexportVersion {
+ base.Errorf("import %q: unknown export format version %d", pkg.Path, version)
+ base.ErrorExit()
+ }
+
+ sLen := ird.uint64()
+ dLen := ird.uint64()
+
+ // Map string (and data) section into memory as a single large
+ // string. This reduces heap fragmentation and allows
+ // returning individual substrings very efficiently.
+ data, err := mapFile(in.File(), in.Offset(), int64(sLen+dLen))
+ if err != nil {
+ base.Errorf("import %q: mapping input: %v", pkg.Path, err)
+ base.ErrorExit()
+ }
+ stringData := data[:sLen]
+ declData := data[sLen:]
+
+ in.MustSeek(int64(sLen+dLen), os.SEEK_CUR)
+
+ p := &iimporter{
+ ipkg: pkg,
+
+ pkgCache: map[uint64]*types.Pkg{},
+ posBaseCache: map[uint64]*src.PosBase{},
+ typCache: map[uint64]*types.Type{},
+
+ stringData: stringData,
+ declData: declData,
+ }
+
+ for i, pt := range predeclared() {
+ p.typCache[uint64(i)] = pt
+ }
+
+ // Declaration index.
+ for nPkgs := ird.uint64(); nPkgs > 0; nPkgs-- {
+ pkg := p.pkgAt(ird.uint64())
+ pkgName := p.stringAt(ird.uint64())
+ pkgHeight := int(ird.uint64())
+ if pkg.Name == "" {
+ pkg.Name = pkgName
+ pkg.Height = pkgHeight
+ types.NumImport[pkgName]++
+
+ // TODO(mdempsky): This belongs somewhere else.
+ pkg.Lookup("_").Def = ir.BlankNode
+ } else {
+ if pkg.Name != pkgName {
+ base.Fatalf("conflicting package names %v and %v for path %q", pkg.Name, pkgName, pkg.Path)
+ }
+ if pkg.Height != pkgHeight {
+ base.Fatalf("conflicting package heights %v and %v for path %q", pkg.Height, pkgHeight, pkg.Path)
+ }
+ }
+
+ for nSyms := ird.uint64(); nSyms > 0; nSyms-- {
+ s := pkg.Lookup(p.stringAt(ird.uint64()))
+ off := ird.uint64()
+
+ if _, ok := DeclImporter[s]; !ok {
+ DeclImporter[s] = iimporterAndOffset{p, off}
+ }
+ }
+ }
+
+ // Inline body index.
+ for nPkgs := ird.uint64(); nPkgs > 0; nPkgs-- {
+ pkg := p.pkgAt(ird.uint64())
+
+ for nSyms := ird.uint64(); nSyms > 0; nSyms-- {
+ s := pkg.Lookup(p.stringAt(ird.uint64()))
+ off := ird.uint64()
+
+ if _, ok := inlineImporter[s]; !ok {
+ inlineImporter[s] = iimporterAndOffset{p, off}
+ }
+ }
+ }
+
+ // Fingerprint.
+ _, err = io.ReadFull(in, fingerprint[:])
+ if err != nil {
+ base.Errorf("import %s: error reading fingerprint", pkg.Path)
+ base.ErrorExit()
+ }
+ return fingerprint
+}
+
+type iimporter struct {
+ ipkg *types.Pkg
+
+ pkgCache map[uint64]*types.Pkg
+ posBaseCache map[uint64]*src.PosBase
+ typCache map[uint64]*types.Type
+
+ stringData string
+ declData string
+}
+
+func (p *iimporter) stringAt(off uint64) string {
+ var x [binary.MaxVarintLen64]byte
+ n := copy(x[:], p.stringData[off:])
+
+ slen, n := binary.Uvarint(x[:n])
+ if n <= 0 {
+ base.Fatalf("varint failed")
+ }
+ spos := off + uint64(n)
+ return p.stringData[spos : spos+slen]
+}
+
+func (p *iimporter) posBaseAt(off uint64) *src.PosBase {
+ if posBase, ok := p.posBaseCache[off]; ok {
+ return posBase
+ }
+
+ file := p.stringAt(off)
+ posBase := src.NewFileBase(file, file)
+ p.posBaseCache[off] = posBase
+ return posBase
+}
+
+func (p *iimporter) pkgAt(off uint64) *types.Pkg {
+ if pkg, ok := p.pkgCache[off]; ok {
+ return pkg
+ }
+
+ pkg := p.ipkg
+ if pkgPath := p.stringAt(off); pkgPath != "" {
+ pkg = types.NewPkg(pkgPath, "")
+ }
+ p.pkgCache[off] = pkg
+ return pkg
+}
+
+// An importReader keeps state for reading an individual imported
+// object (declaration or inline body).
+type importReader struct {
+ strings.Reader
+ p *iimporter
+
+ currPkg *types.Pkg
+ prevBase *src.PosBase
+ prevLine int64
+ prevColumn int64
+}
+
+func (p *iimporter) newReader(off uint64, pkg *types.Pkg) *importReader {
+ r := &importReader{
+ p: p,
+ currPkg: pkg,
+ }
+ // (*strings.Reader).Reset wasn't added until Go 1.7, and we
+ // need to build with Go 1.4.
+ r.Reader = *strings.NewReader(p.declData[off:])
+ return r
+}
+
+func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
+func (r *importReader) posBase() *src.PosBase { return r.p.posBaseAt(r.uint64()) }
+func (r *importReader) pkg() *types.Pkg { return r.p.pkgAt(r.uint64()) }
+
+func (r *importReader) setPkg() {
+ r.currPkg = r.pkg()
+}
+
+func (r *importReader) doDecl(sym *types.Sym) *ir.Name {
+ tag := r.byte()
+ pos := r.pos()
+
+ switch tag {
+ case 'A':
+ typ := r.typ()
+
+ return importalias(r.p.ipkg, pos, sym, typ)
+
+ case 'C':
+ typ := r.typ()
+ val := r.value(typ)
+
+ return importconst(r.p.ipkg, pos, sym, typ, val)
+
+ case 'F':
+ typ := r.signature(nil)
+
+ n := importfunc(r.p.ipkg, pos, sym, typ)
+ r.funcExt(n)
+ return n
+
+ case 'T':
+ // Types can be recursive. We need to set up a stub
+ // declaration before recursing.
+ n := importtype(r.p.ipkg, pos, sym)
+ t := n.Type()
+
+ // We also need to defer width calculations until
+ // after the underlying type has been assigned.
+ types.DeferCheckSize()
+ underlying := r.typ()
+ t.SetUnderlying(underlying)
+ types.ResumeCheckSize()
+
+ if underlying.IsInterface() {
+ r.typeExt(t)
+ return n
+ }
+
+ ms := make([]*types.Field, r.uint64())
+ for i := range ms {
+ mpos := r.pos()
+ msym := r.ident()
+ recv := r.param()
+ mtyp := r.signature(recv)
+
+ fn := ir.NewFunc(mpos)
+ fn.SetType(mtyp)
+ m := ir.NewFuncNameAt(mpos, ir.MethodSym(recv.Type, msym), fn)
+ m.SetType(mtyp)
+ m.Class_ = ir.PFUNC
+ // methodSym already marked m.Sym as a function.
+
+ f := types.NewField(mpos, msym, mtyp)
+ f.Nname = m
+ ms[i] = f
+ }
+ t.Methods().Set(ms)
+
+ r.typeExt(t)
+ for _, m := range ms {
+ r.methExt(m)
+ }
+ return n
+
+ case 'V':
+ typ := r.typ()
+
+ n := importvar(r.p.ipkg, pos, sym, typ)
+ r.varExt(n)
+ return n
+
+ default:
+ base.Fatalf("unexpected tag: %v", tag)
+ panic("unreachable")
+ }
+}
+
+func (p *importReader) value(typ *types.Type) constant.Value {
+ switch constTypeOf(typ) {
+ case constant.Bool:
+ return constant.MakeBool(p.bool())
+ case constant.String:
+ return constant.MakeString(p.string())
+ case constant.Int:
+ var i big.Int
+ p.mpint(&i, typ)
+ return makeInt(&i)
+ case constant.Float:
+ return p.float(typ)
+ case constant.Complex:
+ return makeComplex(p.float(typ), p.float(typ))
+ }
+
+ base.Fatalf("unexpected value type: %v", typ)
+ panic("unreachable")
+}
+
+func (p *importReader) mpint(x *big.Int, typ *types.Type) {
+ signed, maxBytes := intSize(typ)
+
+ maxSmall := 256 - maxBytes
+ if signed {
+ maxSmall = 256 - 2*maxBytes
+ }
+ if maxBytes == 1 {
+ maxSmall = 256
+ }
+
+ n, _ := p.ReadByte()
+ if uint(n) < maxSmall {
+ v := int64(n)
+ if signed {
+ v >>= 1
+ if n&1 != 0 {
+ v = ^v
+ }
+ }
+ x.SetInt64(v)
+ return
+ }
+
+ v := -n
+ if signed {
+ v = -(n &^ 1) >> 1
+ }
+ if v < 1 || uint(v) > maxBytes {
+ base.Fatalf("weird decoding: %v, %v => %v", n, signed, v)
+ }
+ b := make([]byte, v)
+ p.Read(b)
+ x.SetBytes(b)
+ if signed && n&1 != 0 {
+ x.Neg(x)
+ }
+}
+
+func (p *importReader) float(typ *types.Type) constant.Value {
+ var mant big.Int
+ p.mpint(&mant, typ)
+ var f big.Float
+ f.SetInt(&mant)
+ if f.Sign() != 0 {
+ f.SetMantExp(&f, int(p.int64()))
+ }
+ return constant.Make(&f)
+}
+
+func (r *importReader) ident() *types.Sym {
+ name := r.string()
+ if name == "" {
+ return nil
+ }
+ pkg := r.currPkg
+ if types.IsExported(name) {
+ pkg = types.LocalPkg
+ }
+ return pkg.Lookup(name)
+}
+
+func (r *importReader) qualifiedIdent() *ir.Ident {
+ name := r.string()
+ pkg := r.pkg()
+ sym := pkg.Lookup(name)
+ return ir.NewIdent(src.NoXPos, sym)
+}
+
+func (r *importReader) pos() src.XPos {
+ delta := r.int64()
+ r.prevColumn += delta >> 1
+ if delta&1 != 0 {
+ delta = r.int64()
+ r.prevLine += delta >> 1
+ if delta&1 != 0 {
+ r.prevBase = r.posBase()
+ }
+ }
+
+ if (r.prevBase == nil || r.prevBase.AbsFilename() == "") && r.prevLine == 0 && r.prevColumn == 0 {
+ // TODO(mdempsky): Remove once we reliably write
+ // position information for all nodes.
+ return src.NoXPos
+ }
+
+ if r.prevBase == nil {
+ base.Fatalf("missing posbase")
+ }
+ pos := src.MakePos(r.prevBase, uint(r.prevLine), uint(r.prevColumn))
+ return base.Ctxt.PosTable.XPos(pos)
+}
+
+func (r *importReader) typ() *types.Type {
+ return r.p.typAt(r.uint64())
+}
+
+func (p *iimporter) typAt(off uint64) *types.Type {
+ t, ok := p.typCache[off]
+ if !ok {
+ if off < predeclReserved {
+ base.Fatalf("predeclared type missing from cache: %d", off)
+ }
+ t = p.newReader(off-predeclReserved, nil).typ1()
+ p.typCache[off] = t
+ }
+ return t
+}
+
+func (r *importReader) typ1() *types.Type {
+ switch k := r.kind(); k {
+ default:
+ base.Fatalf("unexpected kind tag in %q: %v", r.p.ipkg.Path, k)
+ return nil
+
+ case definedType:
+ // We might be called from within doInline, in which
+ // case Sym.Def can point to declared parameters
+ // instead of the top-level types. Also, we don't
+ // support inlining functions with local defined
+ // types. Therefore, this must be a package-scope
+ // type.
+ n := expandDecl(r.qualifiedIdent())
+ if n.Op() != ir.OTYPE {
+ base.Fatalf("expected OTYPE, got %v: %v, %v", n.Op(), n.Sym(), n)
+ }
+ return n.Type()
+ case pointerType:
+ return types.NewPtr(r.typ())
+ case sliceType:
+ return types.NewSlice(r.typ())
+ case arrayType:
+ n := r.uint64()
+ return types.NewArray(r.typ(), int64(n))
+ case chanType:
+ dir := types.ChanDir(r.uint64())
+ return types.NewChan(r.typ(), dir)
+ case mapType:
+ return types.NewMap(r.typ(), r.typ())
+
+ case signatureType:
+ r.setPkg()
+ return r.signature(nil)
+
+ case structType:
+ r.setPkg()
+
+ fs := make([]*types.Field, r.uint64())
+ for i := range fs {
+ pos := r.pos()
+ sym := r.ident()
+ typ := r.typ()
+ emb := r.bool()
+ note := r.string()
+
+ f := types.NewField(pos, sym, typ)
+ if emb {
+ f.Embedded = 1
+ }
+ f.Note = note
+ fs[i] = f
+ }
+
+ return types.NewStruct(r.currPkg, fs)
+
+ case interfaceType:
+ r.setPkg()
+
+ embeddeds := make([]*types.Field, r.uint64())
+ for i := range embeddeds {
+ pos := r.pos()
+ typ := r.typ()
+
+ embeddeds[i] = types.NewField(pos, nil, typ)
+ }
+
+ methods := make([]*types.Field, r.uint64())
+ for i := range methods {
+ pos := r.pos()
+ sym := r.ident()
+ typ := r.signature(fakeRecvField())
+
+ methods[i] = types.NewField(pos, sym, typ)
+ }
+
+ t := types.NewInterface(r.currPkg, append(embeddeds, methods...))
+
+ // Ensure we expand the interface in the frontend (#25055).
+ types.CheckSize(t)
+ return t
+ }
+}
+
+func (r *importReader) kind() itag {
+ return itag(r.uint64())
+}
+
+func (r *importReader) signature(recv *types.Field) *types.Type {
+ params := r.paramList()
+ results := r.paramList()
+ if n := len(params); n > 0 {
+ params[n-1].SetIsDDD(r.bool())
+ }
+ return types.NewSignature(r.currPkg, recv, params, results)
+}
+
+func (r *importReader) paramList() []*types.Field {
+ fs := make([]*types.Field, r.uint64())
+ for i := range fs {
+ fs[i] = r.param()
+ }
+ return fs
+}
+
+func (r *importReader) param() *types.Field {
+ return types.NewField(r.pos(), r.ident(), r.typ())
+}
+
+func (r *importReader) bool() bool {
+ return r.uint64() != 0
+}
+
+func (r *importReader) int64() int64 {
+ n, err := binary.ReadVarint(r)
+ if err != nil {
+ base.Fatalf("readVarint: %v", err)
+ }
+ return n
+}
+
+func (r *importReader) uint64() uint64 {
+ n, err := binary.ReadUvarint(r)
+ if err != nil {
+ base.Fatalf("readVarint: %v", err)
+ }
+ return n
+}
+
+func (r *importReader) byte() byte {
+ x, err := r.ReadByte()
+ if err != nil {
+ base.Fatalf("declReader.ReadByte: %v", err)
+ }
+ return x
+}
+
+// Compiler-specific extensions.
+
+func (r *importReader) varExt(n ir.Node) {
+ r.linkname(n.Sym())
+ r.symIdx(n.Sym())
+}
+
+func (r *importReader) funcExt(n *ir.Name) {
+ r.linkname(n.Sym())
+ r.symIdx(n.Sym())
+
+ // Escape analysis.
+ for _, fs := range &types.RecvsParams {
+ for _, f := range fs(n.Type()).FieldSlice() {
+ f.Note = r.string()
+ }
+ }
+
+ // Inline body.
+ if u := r.uint64(); u > 0 {
+ n.Func.Inl = &ir.Inline{
+ Cost: int32(u - 1),
+ }
+ n.Func.Endlineno = r.pos()
+ }
+}
+
+func (r *importReader) methExt(m *types.Field) {
+ if r.bool() {
+ m.SetNointerface(true)
+ }
+ r.funcExt(m.Nname.(*ir.Name))
+}
+
+func (r *importReader) linkname(s *types.Sym) {
+ s.Linkname = r.string()
+}
+
+func (r *importReader) symIdx(s *types.Sym) {
+ lsym := s.Linksym()
+ idx := int32(r.int64())
+ if idx != -1 {
+ if s.Linkname != "" {
+ base.Fatalf("bad index for linknamed symbol: %v %d\n", lsym, idx)
+ }
+ lsym.SymIdx = idx
+ lsym.Set(obj.AttrIndexed, true)
+ }
+}
+
+func (r *importReader) typeExt(t *types.Type) {
+ t.SetNotInHeap(r.bool())
+ i, pi := r.int64(), r.int64()
+ if i != -1 && pi != -1 {
+ typeSymIdx[t] = [2]int64{i, pi}
+ }
+}
+
+// Map imported type T to the index of type descriptor symbols of T and *T,
+// so we can use index to reference the symbol.
+var typeSymIdx = make(map[*types.Type][2]int64)
+
+func BaseTypeIndex(t *types.Type) int64 {
+ tbase := t
+ if t.IsPtr() && t.Sym() == nil && t.Elem().Sym() != nil {
+ tbase = t.Elem()
+ }
+ i, ok := typeSymIdx[tbase]
+ if !ok {
+ return -1
+ }
+ if t != tbase {
+ return i[1]
+ }
+ return i[0]
+}
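+
+// For example (illustrative): if an imported type T was recorded
+// above with indices {3, 4}, BaseTypeIndex(T) returns 3 and
+// BaseTypeIndex(*T) returns 4; types with no recorded indices
+// return -1.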
+
+func (r *importReader) doInline(fn *ir.Func) {
+ if len(fn.Inl.Body) != 0 {
+ base.Fatalf("%v already has inline body", fn)
+ }
+
+ StartFuncBody(fn)
+ body := r.stmtList()
+ FinishFuncBody()
+ if body == nil {
+ // Make sure an empty body is not interpreted as
+ // having no inlineable body (see also parser.fnbody);
+ // not doing so can cause significant performance
+ // degradation due to unnecessary calls to empty
+ // functions.
+ body = []ir.Node{}
+ }
+ fn.Inl.Body = body
+
+ importlist = append(importlist, fn)
+
+ if base.Flag.E > 0 && base.Flag.LowerM > 2 {
+ if base.Flag.LowerM > 3 {
+ fmt.Printf("inl body for %v %v: %+v\n", fn, fn.Type(), ir.Nodes(fn.Inl.Body))
+ } else {
+ fmt.Printf("inl body for %v %v: %v\n", fn, fn.Type(), ir.Nodes(fn.Inl.Body))
+ }
+ }
+}
+
+// ----------------------------------------------------------------------------
+// Inlined function bodies
+
+// Approach: Read nodes and use them to create/declare the same data structures
+// as done originally by the (hidden) parser by closely following the parser's
+// original code. In other words, "parsing" the import data (which happens to
+// be encoded in binary rather than textual form) is the best way at the moment to
+// re-establish the syntax tree's invariants. At some future point we might be
+// able to avoid this round-about way and create the rewritten nodes directly,
+// possibly avoiding a lot of duplicate work (name resolution, type checking).
+//
+// Refined nodes (e.g., ODOTPTR as a refinement of OXDOT) are exported as their
+// unrefined nodes (since this is what the importer uses). The respective case
+// entries are unreachable in the importer.
+
+func (r *importReader) stmtList() []ir.Node {
+ var list []ir.Node
+ for {
+ n := r.node()
+ if n == nil {
+ break
+ }
+ // OBLOCK nodes are not written to the import data directly,
+ // but the ODCL case in node below creates one.
+ // Inline them into the statement list.
+ if n.Op() == ir.OBLOCK {
+ n := n.(*ir.BlockStmt)
+ list = append(list, n.List...)
+ } else {
+ list = append(list, n)
+ }
+ }
+ return list
+}
+
+func (r *importReader) caseList(sw ir.Node) []ir.Node {
+ namedTypeSwitch := isNamedTypeSwitch(sw)
+
+ cases := make([]ir.Node, r.uint64())
+ for i := range cases {
+ cas := ir.NewCaseStmt(r.pos(), nil, nil)
+ cas.List.Set(r.stmtList())
+ if namedTypeSwitch {
+ // Note: per-case variables will have distinct, dotted
+ // names after import. That's okay: swt.go only needs
+ // Sym for diagnostics anyway.
+ caseVar := ir.NewNameAt(cas.Pos(), r.ident())
+ Declare(caseVar, DeclContext)
+ cas.Vars = []ir.Node{caseVar}
+ caseVar.Defn = sw.(*ir.SwitchStmt).Tag
+ }
+ cas.Body.Set(r.stmtList())
+ cases[i] = cas
+ }
+ return cases
+}
+
+func (r *importReader) exprList() []ir.Node {
+ var list []ir.Node
+ for {
+ n := r.expr()
+ if n == nil {
+ break
+ }
+ list = append(list, n)
+ }
+ return list
+}
+
+func (r *importReader) expr() ir.Node {
+ n := r.node()
+ if n != nil && n.Op() == ir.OBLOCK {
+ n := n.(*ir.BlockStmt)
+ base.Fatalf("unexpected block node: %v", n)
+ }
+ return n
+}
+
+// TODO(gri) split into expr and stmt
+func (r *importReader) node() ir.Node {
+ switch op := r.op(); op {
+ // expressions
+ // case OPAREN:
+ // unreachable - unpacked by exporter
+
+ case ir.ONIL:
+ pos := r.pos()
+ typ := r.typ()
+
+ n := npos(pos, NodNil())
+ n.SetType(typ)
+ return n
+
+ case ir.OLITERAL:
+ pos := r.pos()
+ typ := r.typ()
+
+ n := npos(pos, ir.NewLiteral(r.value(typ)))
+ n.SetType(typ)
+ return n
+
+ case ir.ONONAME:
+ return r.qualifiedIdent()
+
+ case ir.ONAME:
+ return r.ident().Def.(*ir.Name)
+
+ // case OPACK, ONONAME:
+ // unreachable - should have been resolved by typechecking
+
+ case ir.OTYPE:
+ return ir.TypeNode(r.typ())
+
+ case ir.OTYPESW:
+ pos := r.pos()
+ var tag *ir.Ident
+ if s := r.ident(); s != nil {
+ tag = ir.NewIdent(pos, s)
+ }
+ expr, _ := r.exprsOrNil()
+ return ir.NewTypeSwitchGuard(pos, tag, expr)
+
+ // case OTARRAY, OTMAP, OTCHAN, OTSTRUCT, OTINTER, OTFUNC:
+ // unreachable - should have been resolved by typechecking
+
+ // case OCLOSURE:
+ // unimplemented
+
+ // case OPTRLIT:
+ // unreachable - mapped to case OADDR below by exporter
+
+ case ir.OSTRUCTLIT:
+ // TODO(mdempsky): Export position information for OSTRUCTKEY nodes.
+ savedlineno := base.Pos
+ base.Pos = r.pos()
+ n := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(r.typ()).(ir.Ntype), nil)
+ n.List.Set(r.elemList()) // special handling of field names
+ base.Pos = savedlineno
+ return n
+
+ // case OARRAYLIT, OSLICELIT, OMAPLIT:
+ // unreachable - mapped to case OCOMPLIT below by exporter
+
+ case ir.OCOMPLIT:
+ n := ir.NewCompLitExpr(r.pos(), ir.OCOMPLIT, ir.TypeNode(r.typ()).(ir.Ntype), nil)
+ n.List.Set(r.exprList())
+ return n
+
+ case ir.OKEY:
+ pos := r.pos()
+ left, right := r.exprsOrNil()
+ return ir.NewKeyExpr(pos, left, right)
+
+ // case OSTRUCTKEY:
+ // unreachable - handled in case OSTRUCTLIT by elemList
+
+ // case OCALLPART:
+ // unreachable - mapped to case OXDOT below by exporter
+
+ // case OXDOT, ODOT, ODOTPTR, ODOTINTER, ODOTMETH:
+ // unreachable - mapped to case OXDOT below by exporter
+
+ case ir.OXDOT:
+ // see parser.new_dotname
+ return ir.NewSelectorExpr(r.pos(), ir.OXDOT, r.expr(), r.ident())
+
+ // case ODOTTYPE, ODOTTYPE2:
+ // unreachable - mapped to case ODOTTYPE below by exporter
+
+ case ir.ODOTTYPE:
+ n := ir.NewTypeAssertExpr(r.pos(), r.expr(), nil)
+ n.SetType(r.typ())
+ return n
+
+ // case OINDEX, OINDEXMAP, OSLICE, OSLICESTR, OSLICEARR, OSLICE3, OSLICE3ARR:
+ // unreachable - mapped to cases below by exporter
+
+ case ir.OINDEX:
+ return ir.NewIndexExpr(r.pos(), r.expr(), r.expr())
+
+ case ir.OSLICE, ir.OSLICE3:
+ n := ir.NewSliceExpr(r.pos(), op, r.expr())
+ low, high := r.exprsOrNil()
+ var max ir.Node
+ if n.Op().IsSlice3() {
+ max = r.expr()
+ }
+ n.SetSliceBounds(low, high, max)
+ return n
+
+ // case OCONV, OCONVIFACE, OCONVNOP, OBYTES2STR, ORUNES2STR, OSTR2BYTES, OSTR2RUNES, ORUNESTR:
+ // unreachable - mapped to OCONV case below by exporter
+
+ case ir.OCONV:
+ n := ir.NewConvExpr(r.pos(), ir.OCONV, nil, r.expr())
+ n.SetType(r.typ())
+ return n
+
+ case ir.OCOPY, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCAP, ir.OCLOSE, ir.ODELETE, ir.OLEN, ir.OMAKE, ir.ONEW, ir.OPANIC, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
+ n := builtinCall(r.pos(), op)
+ n.Args.Set(r.exprList())
+ if op == ir.OAPPEND {
+ n.IsDDD = r.bool()
+ }
+ return n
+
+ // case OCALLFUNC, OCALLMETH, OCALLINTER, OGETG:
+ // unreachable - mapped to OCALL case below by exporter
+
+ case ir.OCALL:
+ n := ir.NewCallExpr(r.pos(), ir.OCALL, nil, nil)
+ n.PtrInit().Set(r.stmtList())
+ n.X = r.expr()
+ n.Args.Set(r.exprList())
+ n.IsDDD = r.bool()
+ return n
+
+ case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
+ n := builtinCall(r.pos(), ir.OMAKE)
+ n.Args.Append(ir.TypeNode(r.typ()))
+ n.Args.Append(r.exprList()...)
+ return n
+
+ // unary expressions
+ case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.ORECV:
+ return ir.NewUnaryExpr(r.pos(), op, r.expr())
+
+ case ir.OADDR:
+ return NodAddrAt(r.pos(), r.expr())
+
+ case ir.ODEREF:
+ return ir.NewStarExpr(r.pos(), r.expr())
+
+ // binary expressions
+ case ir.OADD, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT,
+ ir.OLSH, ir.OMOD, ir.OMUL, ir.ONE, ir.OOR, ir.ORSH, ir.OSUB, ir.OXOR:
+ return ir.NewBinaryExpr(r.pos(), op, r.expr(), r.expr())
+
+ case ir.OANDAND, ir.OOROR:
+ return ir.NewLogicalExpr(r.pos(), op, r.expr(), r.expr())
+
+ case ir.OSEND:
+ return ir.NewSendStmt(r.pos(), r.expr(), r.expr())
+
+ case ir.OADDSTR:
+ pos := r.pos()
+ list := r.exprList()
+ x := npos(pos, list[0])
+ for _, y := range list[1:] {
+ x = ir.NewBinaryExpr(pos, ir.OADD, x, y)
+ }
+ return x
+
+ // --------------------------------------------------------------------
+ // statements
+ case ir.ODCL:
+ pos := r.pos()
+ lhs := ir.NewDeclNameAt(pos, ir.ONAME, r.ident())
+ lhs.SetType(r.typ())
+
+ Declare(lhs, ir.PAUTO)
+
+ var stmts ir.Nodes
+ stmts.Append(ir.NewDecl(base.Pos, ir.ODCL, lhs))
+ stmts.Append(ir.NewAssignStmt(base.Pos, lhs, nil))
+ return ir.NewBlockStmt(pos, stmts)
+
+ // case OAS, OASWB:
+ // unreachable - mapped to OAS case below by exporter
+
+ case ir.OAS:
+ return ir.NewAssignStmt(r.pos(), r.expr(), r.expr())
+
+ case ir.OASOP:
+ n := ir.NewAssignOpStmt(r.pos(), ir.OXXX, nil, nil)
+ n.AsOp = r.op()
+ n.X = r.expr()
+ if !r.bool() {
+ n.Y = ir.NewInt(1)
+ n.IncDec = true
+ } else {
+ n.Y = r.expr()
+ }
+ return n
+
+ // case OAS2DOTTYPE, OAS2FUNC, OAS2MAPR, OAS2RECV:
+ // unreachable - mapped to OAS2 case below by exporter
+
+ case ir.OAS2:
+ n := ir.NewAssignListStmt(r.pos(), ir.OAS2, nil, nil)
+ n.Lhs.Set(r.exprList())
+ n.Rhs.Set(r.exprList())
+ return n
+
+ case ir.ORETURN:
+ n := ir.NewReturnStmt(r.pos(), nil)
+ n.Results.Set(r.exprList())
+ return n
+
+ // case ORETJMP:
+ // unreachable - generated by compiler for trampoline routines (not exported)
+
+ case ir.OGO, ir.ODEFER:
+ return ir.NewGoDeferStmt(r.pos(), op, r.expr())
+
+ case ir.OIF:
+ n := ir.NewIfStmt(r.pos(), nil, nil, nil)
+ n.PtrInit().Set(r.stmtList())
+ n.Cond = r.expr()
+ n.Body.Set(r.stmtList())
+ n.Else.Set(r.stmtList())
+ return n
+
+ case ir.OFOR:
+ n := ir.NewForStmt(r.pos(), nil, nil, nil, nil)
+ n.PtrInit().Set(r.stmtList())
+ left, right := r.exprsOrNil()
+ n.Cond = left
+ n.Post = right
+ n.Body.Set(r.stmtList())
+ return n
+
+ case ir.ORANGE:
+ n := ir.NewRangeStmt(r.pos(), nil, nil, nil)
+ n.Vars.Set(r.stmtList())
+ n.X = r.expr()
+ n.Body.Set(r.stmtList())
+ return n
+
+ case ir.OSELECT:
+ n := ir.NewSelectStmt(r.pos(), nil)
+ n.PtrInit().Set(r.stmtList())
+ r.exprsOrNil() // TODO(rsc): Delete (and fix exporter). These are always nil.
+ n.Cases.Set(r.caseList(n))
+ return n
+
+ case ir.OSWITCH:
+ n := ir.NewSwitchStmt(r.pos(), nil, nil)
+ n.PtrInit().Set(r.stmtList())
+ left, _ := r.exprsOrNil()
+ n.Tag = left
+ n.Cases.Set(r.caseList(n))
+ return n
+
+ // case OCASE:
+ // handled by caseList
+
+ case ir.OFALL:
+ n := ir.NewBranchStmt(r.pos(), ir.OFALL, nil)
+ return n
+
+ // case OEMPTY:
+ // unreachable - not emitted by exporter
+
+ case ir.OBREAK, ir.OCONTINUE, ir.OGOTO:
+ var sym *types.Sym
+ pos := r.pos()
+ if label := r.string(); label != "" {
+ sym = Lookup(label)
+ }
+ return ir.NewBranchStmt(pos, op, sym)
+
+ case ir.OLABEL:
+ return ir.NewLabelStmt(r.pos(), Lookup(r.string()))
+
+ case ir.OEND:
+ return nil
+
+ default:
+ base.Fatalf("cannot import %v (%d) node\n"+
+ "\t==> please file an issue and assign to gri@", op, int(op))
+ panic("unreachable") // satisfy compiler
+ }
+}
+
+func (r *importReader) op() ir.Op {
+ return ir.Op(r.uint64())
+}
+
+func (r *importReader) elemList() []ir.Node {
+ c := r.uint64()
+ list := make([]ir.Node, c)
+ for i := range list {
+ s := r.ident()
+ list[i] = ir.NewStructKeyExpr(base.Pos, s, r.expr())
+ }
+ return list
+}
+
+func (r *importReader) exprsOrNil() (a, b ir.Node) {
+ ab := r.uint64()
+ if ab&1 != 0 {
+ a = r.expr()
+ }
+ if ab&2 != 0 {
+ b = r.node()
+ }
+ return
+}
+
+func builtinCall(pos src.XPos, op ir.Op) *ir.CallExpr {
+ return ir.NewCallExpr(pos, ir.OCALL, ir.NewIdent(base.Pos, types.BuiltinPkg.Lookup(ir.OpNames[op])), nil)
+}
+
+func npos(pos src.XPos, n ir.Node) ir.Node {
+ n.SetPos(pos)
+ return n
+}
diff --git a/src/cmd/compile/internal/typecheck/mapfile_mmap.go b/src/cmd/compile/internal/typecheck/mapfile_mmap.go
new file mode 100644
index 0000000000..2f3aa16dec
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/mapfile_mmap.go
@@ -0,0 +1,48 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build darwin dragonfly freebsd linux netbsd openbsd
+
+package typecheck
+
+import (
+ "os"
+ "reflect"
+ "syscall"
+ "unsafe"
+)
+
+// TODO(mdempsky): Is there a higher-level abstraction that still
+// works well for iimport?
+
+// mapFile returns length bytes from the file starting at the
+// specified offset as a string.
+func mapFile(f *os.File, offset, length int64) (string, error) {
+ // POSIX mmap: "The implementation may require that off is a
+ // multiple of the page size."
+ x := offset & int64(os.Getpagesize()-1)
+ offset -= x
+ length += x
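+
+ // For example (illustrative): with a 4096-byte page size, offset
+ // 6000 gives x = 6000 & 4095 = 1904, so we map from offset 4096
+ // with 1904 extra bytes and skip them via buf = buf[x:] below.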
+
+ buf, err := syscall.Mmap(int(f.Fd()), offset, int(length), syscall.PROT_READ, syscall.MAP_SHARED)
+ keepAlive(f)
+ if err != nil {
+ return "", err
+ }
+
+ buf = buf[x:]
+ pSlice := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
+
+ var res string
+ pString := (*reflect.StringHeader)(unsafe.Pointer(&res))
+
+ pString.Data = pSlice.Data
+ pString.Len = pSlice.Len
+
+ return res, nil
+}
+
+// keepAlive is a reimplementation of runtime.KeepAlive, which wasn't
+// added until Go 1.7, whereas we need to compile with Go 1.4.
+var keepAlive = func(interface{}) {}
diff --git a/src/cmd/compile/internal/typecheck/mapfile_read.go b/src/cmd/compile/internal/typecheck/mapfile_read.go
new file mode 100644
index 0000000000..4059f261d4
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/mapfile_read.go
@@ -0,0 +1,21 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build !darwin,!dragonfly,!freebsd,!linux,!netbsd,!openbsd
+
+package typecheck
+
+import (
+ "io"
+ "os"
+)
+
+func mapFile(f *os.File, offset, length int64) (string, error) {
+ buf := make([]byte, length)
+ _, err := io.ReadFull(io.NewSectionReader(f, offset, length), buf)
+ if err != nil {
+ return "", err
+ }
+ return string(buf), nil
+}
diff --git a/src/cmd/compile/internal/typecheck/mkbuiltin.go b/src/cmd/compile/internal/typecheck/mkbuiltin.go
new file mode 100644
index 0000000000..2a208d960f
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/mkbuiltin.go
@@ -0,0 +1,228 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build ignore
+
+// Generate builtin.go from builtin/runtime.go.
+
+package main
+
+import (
+ "bytes"
+ "flag"
+ "fmt"
+ "go/ast"
+ "go/format"
+ "go/parser"
+ "go/token"
+ "io"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+)
+
+var stdout = flag.Bool("stdout", false, "write to stdout instead of builtin.go")
+
+func main() {
+ flag.Parse()
+
+ var b bytes.Buffer
+ fmt.Fprintln(&b, "// Code generated by mkbuiltin.go. DO NOT EDIT.")
+ fmt.Fprintln(&b)
+ fmt.Fprintln(&b, "package typecheck")
+ fmt.Fprintln(&b)
+ fmt.Fprintln(&b, `import (`)
+ fmt.Fprintln(&b, ` "cmd/compile/internal/ir"`)
+ fmt.Fprintln(&b, ` "cmd/compile/internal/types"`)
+ fmt.Fprintln(&b, `)`)
+
+ mkbuiltin(&b, "runtime")
+
+ out, err := format.Source(b.Bytes())
+ if err != nil {
+ log.Fatal(err)
+ }
+ if *stdout {
+ _, err = os.Stdout.Write(out)
+ } else {
+ err = ioutil.WriteFile("builtin.go", out, 0666)
+ }
+ if err != nil {
+ log.Fatal(err)
+ }
+}
+
+func mkbuiltin(w io.Writer, name string) {
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, filepath.Join("builtin", name+".go"), nil, 0)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ var interner typeInterner
+
+ fmt.Fprintf(w, "var %sDecls = [...]struct { name string; tag int; typ int }{\n", name)
+ for _, decl := range f.Decls {
+ switch decl := decl.(type) {
+ case *ast.FuncDecl:
+ if decl.Recv != nil {
+ log.Fatal("methods unsupported")
+ }
+ if decl.Body != nil {
+ log.Fatal("unexpected function body")
+ }
+ fmt.Fprintf(w, "{%q, funcTag, %d},\n", decl.Name.Name, interner.intern(decl.Type))
+ case *ast.GenDecl:
+ if decl.Tok == token.IMPORT {
+ if len(decl.Specs) != 1 || decl.Specs[0].(*ast.ImportSpec).Path.Value != "\"unsafe\"" {
+ log.Fatal("runtime cannot import other package")
+ }
+ continue
+ }
+ if decl.Tok != token.VAR {
+ log.Fatal("unhandled declaration kind", decl.Tok)
+ }
+ for _, spec := range decl.Specs {
+ spec := spec.(*ast.ValueSpec)
+ if len(spec.Values) != 0 {
+ log.Fatal("unexpected values")
+ }
+ typ := interner.intern(spec.Type)
+ for _, name := range spec.Names {
+ fmt.Fprintf(w, "{%q, varTag, %d},\n", name.Name, typ)
+ }
+ }
+ default:
+ log.Fatal("unhandled decl type", decl)
+ }
+ }
+ fmt.Fprintln(w, "}")
+
+ fmt.Fprintln(w)
+ fmt.Fprintf(w, "func %sTypes() []*types.Type {\n", name)
+ fmt.Fprintf(w, "var typs [%d]*types.Type\n", len(interner.typs))
+ for i, typ := range interner.typs {
+ fmt.Fprintf(w, "typs[%d] = %s\n", i, typ)
+ }
+ fmt.Fprintln(w, "return typs[:]")
+ fmt.Fprintln(w, "}")
+}
+
+// typeInterner maps Go type expressions to compiler code that
+// constructs the denoted type. It recognizes and reuses common
+// subtype expressions.
+type typeInterner struct {
+ typs []string
+ hash map[string]int
+}
+
+func (i *typeInterner) intern(t ast.Expr) int {
+ x := i.mktype(t)
+ v, ok := i.hash[x]
+ if !ok {
+ v = len(i.typs)
+ if i.hash == nil {
+ i.hash = make(map[string]int)
+ }
+ i.hash[x] = v
+ i.typs = append(i.typs, x)
+ }
+ return v
+}
+
+func (i *typeInterner) subtype(t ast.Expr) string {
+ return fmt.Sprintf("typs[%d]", i.intern(t))
+}
+
+func (i *typeInterner) mktype(t ast.Expr) string {
+ switch t := t.(type) {
+ case *ast.Ident:
+ switch t.Name {
+ case "byte":
+ return "types.ByteType"
+ case "rune":
+ return "types.RuneType"
+ }
+ return fmt.Sprintf("types.Types[types.T%s]", strings.ToUpper(t.Name))
+ case *ast.SelectorExpr:
+ if t.X.(*ast.Ident).Name != "unsafe" || t.Sel.Name != "Pointer" {
+ log.Fatalf("unhandled type: %#v", t)
+ }
+ return "types.Types[types.TUNSAFEPTR]"
+
+ case *ast.ArrayType:
+ if t.Len == nil {
+ return fmt.Sprintf("types.NewSlice(%s)", i.subtype(t.Elt))
+ }
+ return fmt.Sprintf("types.NewArray(%s, %d)", i.subtype(t.Elt), intconst(t.Len))
+ case *ast.ChanType:
+ dir := "types.Cboth"
+ switch t.Dir {
+ case ast.SEND:
+ dir = "types.Csend"
+ case ast.RECV:
+ dir = "types.Crecv"
+ }
+ return fmt.Sprintf("types.NewChan(%s, %s)", i.subtype(t.Value), dir)
+ case *ast.FuncType:
+ return fmt.Sprintf("functype(nil, %s, %s)", i.fields(t.Params, false), i.fields(t.Results, false))
+ case *ast.InterfaceType:
+ if len(t.Methods.List) != 0 {
+ log.Fatal("non-empty interfaces unsupported")
+ }
+ return "types.Types[types.TINTER]"
+ case *ast.MapType:
+ return fmt.Sprintf("types.NewMap(%s, %s)", i.subtype(t.Key), i.subtype(t.Value))
+ case *ast.StarExpr:
+ return fmt.Sprintf("types.NewPtr(%s)", i.subtype(t.X))
+ case *ast.StructType:
+ return fmt.Sprintf("tostruct(%s)", i.fields(t.Fields, true))
+
+ default:
+ log.Fatalf("unhandled type: %#v", t)
+ panic("unreachable")
+ }
+}
+
+func (i *typeInterner) fields(fl *ast.FieldList, keepNames bool) string {
+ if fl == nil || len(fl.List) == 0 {
+ return "nil"
+ }
+ var res []string
+ for _, f := range fl.List {
+ typ := i.subtype(f.Type)
+ if len(f.Names) == 0 {
+ res = append(res, fmt.Sprintf("anonfield(%s)", typ))
+ } else {
+ for _, name := range f.Names {
+ if keepNames {
+ res = append(res, fmt.Sprintf("namedfield(%q, %s)", name.Name, typ))
+ } else {
+ res = append(res, fmt.Sprintf("anonfield(%s)", typ))
+ }
+ }
+ }
+ }
+ return fmt.Sprintf("[]*ir.Field{%s}", strings.Join(res, ", "))
+}
+
+func intconst(e ast.Expr) int64 {
+ switch e := e.(type) {
+ case *ast.BasicLit:
+ if e.Kind != token.INT {
+ log.Fatalf("expected INT, got %v", e.Kind)
+ }
+ x, err := strconv.ParseInt(e.Value, 0, 64)
+ if err != nil {
+ log.Fatal(err)
+ }
+ return x
+ default:
+ log.Fatalf("unhandled expr: %#v", e)
+ panic("unreachable")
+ }
+}
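+
+// For reference, the generated builtin.go has roughly this shape
+// (illustrative entries only; the real names, tags, and type indices
+// are derived from builtin/runtime.go by the code above):
+//
+// var runtimeDecls = [...]struct { name string; tag int; typ int }{
+// {"newobject", funcTag, 4},
+// {"writeBarrier", varTag, 15},
+// }
+//
+// func runtimeTypes() []*types.Type {
+// var typs [16]*types.Type
+// typs[0] = types.ByteType
+// // ...
+// return typs[:]
+// }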
diff --git a/src/cmd/compile/internal/typecheck/stmt.go b/src/cmd/compile/internal/typecheck/stmt.go
new file mode 100644
index 0000000000..889ee06d6e
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/stmt.go
@@ -0,0 +1,435 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typecheck
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+ "cmd/internal/src"
+)
+
+// range
+func typecheckrange(n *ir.RangeStmt) {
+ // Typechecking order is important here:
+ // 0. first typecheck range expression (slice/map/chan),
+ // it is evaluated only once and so logically it is not part of the loop.
+ // 1. typecheck produced values,
+ // this part can declare new vars and so it must be typechecked before body,
+ // because body can contain a closure that captures the vars.
+ // 2. decldepth++ to denote loop body.
+ // 3. typecheck body.
+ // 4. decldepth--.
+ typecheckrangeExpr(n)
+
+ // second half of dance, the first half being typecheckrangeExpr
+ n.SetTypecheck(1)
+ ls := n.Vars
+ for i1, n1 := range ls {
+ if n1.Typecheck() == 0 {
+ ls[i1] = AssignExpr(ls[i1])
+ }
+ }
+
+ decldepth++
+ Stmts(n.Body)
+ decldepth--
+}
+
+func typecheckrangeExpr(n *ir.RangeStmt) {
+ n.X = Expr(n.X)
+
+ t := n.X.Type()
+ if t == nil {
+ return
+ }
+ // delicate little dance. see typecheckas2
+ ls := n.Vars
+ for i1, n1 := range ls {
+ if !ir.DeclaredBy(n1, n) {
+ ls[i1] = AssignExpr(ls[i1])
+ }
+ }
+
+ if t.IsPtr() && t.Elem().IsArray() {
+ t = t.Elem()
+ }
+ n.SetType(t)
+
+ var t1, t2 *types.Type
+ toomany := false
+ switch t.Kind() {
+ default:
+ base.ErrorfAt(n.Pos(), "cannot range over %L", n.X)
+ return
+
+ case types.TARRAY, types.TSLICE:
+ t1 = types.Types[types.TINT]
+ t2 = t.Elem()
+
+ case types.TMAP:
+ t1 = t.Key()
+ t2 = t.Elem()
+
+ case types.TCHAN:
+ if !t.ChanDir().CanRecv() {
+ base.ErrorfAt(n.Pos(), "invalid operation: range %v (receive from send-only type %v)", n.X, n.X.Type())
+ return
+ }
+
+ t1 = t.Elem()
+ t2 = nil
+ if len(n.Vars) == 2 {
+ toomany = true
+ }
+
+ case types.TSTRING:
+ t1 = types.Types[types.TINT]
+ t2 = types.RuneType
+ }
+
+ if len(n.Vars) > 2 || toomany {
+ base.ErrorfAt(n.Pos(), "too many variables in range")
+ }
+
+ var v1, v2 ir.Node
+ if len(n.Vars) != 0 {
+ v1 = n.Vars[0]
+ }
+ if len(n.Vars) > 1 {
+ v2 = n.Vars[1]
+ }
+
+ // this is not only an optimization but also a requirement in the spec.
+ // "if the second iteration variable is the blank identifier, the range
+ // clause is equivalent to the same clause with only the first variable
+ // present."
+ if ir.IsBlank(v2) {
+ if v1 != nil {
+ n.Vars = []ir.Node{v1}
+ }
+ v2 = nil
+ }
+
+ if v1 != nil {
+ if ir.DeclaredBy(v1, n) {
+ v1.SetType(t1)
+ } else if v1.Type() != nil {
+ if op, why := assignop(t1, v1.Type()); op == ir.OXXX {
+ base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t1, v1, why)
+ }
+ }
+ checkassign(n, v1)
+ }
+
+ if v2 != nil {
+ if ir.DeclaredBy(v2, n) {
+ v2.SetType(t2)
+ } else if v2.Type() != nil {
+ if op, why := assignop(t2, v2.Type()); op == ir.OXXX {
+ base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t2, v2, why)
+ }
+ }
+ checkassign(n, v2)
+ }
+}
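+
+// For reference, the t1/t2 pairs assigned above correspond to the spec's
+// range forms; a sketch in ordinary user code (nothing compiler-internal
+// is assumed here):
+//
+// for i, b := range []byte("hi") { _, _ = i, b } // i int, b byte
+// for k, v := range map[string]int{} { _, _ = k, v } // k string, v int
+// for i, r := range "hé" { _, _ = i, r } // i int, r rune
+// for v := range make(<-chan int) { _ = v } // v int; a 2nd variable is "too many"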
+
+// select
+func typecheckselect(sel *ir.SelectStmt) {
+ var def ir.Node
+ lno := ir.SetPos(sel)
+ Stmts(sel.Init())
+ for _, ncase := range sel.Cases {
+ ncase := ncase.(*ir.CaseStmt)
+
+ if len(ncase.List) == 0 {
+ // default
+ if def != nil {
+ base.ErrorfAt(ncase.Pos(), "multiple defaults in select (first at %v)", ir.Line(def))
+ } else {
+ def = ncase
+ }
+ } else if len(ncase.List) > 1 {
+ base.ErrorfAt(ncase.Pos(), "select cases cannot be lists")
+ } else {
+ ncase.List[0] = Stmt(ncase.List[0])
+ n := ncase.List[0]
+ ncase.Comm = n
+ ncase.List.Set(nil)
+ oselrecv2 := func(dst, recv ir.Node, colas bool) {
+ n := ir.NewAssignListStmt(n.Pos(), ir.OSELRECV2, nil, nil)
+ n.Lhs = []ir.Node{dst, ir.BlankNode}
+ n.Rhs = []ir.Node{recv}
+ n.Def = colas
+ n.SetTypecheck(1)
+ ncase.Comm = n
+ }
+ switch n.Op() {
+ default:
+ pos := n.Pos()
+ if n.Op() == ir.ONAME {
+ // We don't have the right position for ONAME nodes (see #15459 and
+ // others). Using ncase.Pos for now as it will provide the correct
+ // line number (assuming the expression follows the "case" keyword
+ // on the same line). This matches the approach before 1.10.
+ pos = ncase.Pos()
+ }
+ base.ErrorfAt(pos, "select case must be receive, send or assign recv")
+
+ case ir.OAS:
+ // convert x = <-c into x, _ = <-c
+ // remove implicit conversions; the eventual assignment
+ // will reintroduce them.
+ n := n.(*ir.AssignStmt)
+ if r := n.Y; r.Op() == ir.OCONVNOP || r.Op() == ir.OCONVIFACE {
+ r := r.(*ir.ConvExpr)
+ if r.Implicit() {
+ n.Y = r.X
+ }
+ }
+ if n.Y.Op() != ir.ORECV {
+ base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
+ break
+ }
+ oselrecv2(n.X, n.Y, n.Def)
+
+ case ir.OAS2RECV:
+ n := n.(*ir.AssignListStmt)
+ if n.Rhs[0].Op() != ir.ORECV {
+ base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
+ break
+ }
+ n.SetOp(ir.OSELRECV2)
+
+ case ir.ORECV:
+ // convert <-c into _, _ = <-c
+ n := n.(*ir.UnaryExpr)
+ oselrecv2(ir.BlankNode, n, false)
+
+ case ir.OSEND:
+ break
+ }
+ }
+
+ Stmts(ncase.Body)
+ }
+
+ base.Pos = lno
+}
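+
+// For reference, the rewrites above normalize every communication clause
+// to a send or an OSELRECV2 assignment; in source terms (ordinary user
+// code, not compiler-internal):
+//
+// select {
+// case x = <-c: // rewritten to x, _ = <-c (OSELRECV2)
+// case <-c: // rewritten to _, _ = <-c (OSELRECV2)
+// case y, ok := <-c: // parsed as OAS2RECV; its op becomes OSELRECV2
+// case c <- v: // OSEND, left as is
+// }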
+
+type typeSet struct {
+ m map[string][]typeSetEntry
+}
+
+func (s *typeSet) add(pos src.XPos, typ *types.Type) {
+ if s.m == nil {
+ s.m = make(map[string][]typeSetEntry)
+ }
+
+ // LongString does not uniquely identify types, so we need to
+ // disambiguate collisions with types.Identical.
+ // TODO(mdempsky): Add a method that *is* unique.
+ ls := typ.LongString()
+ prevs := s.m[ls]
+ for _, prev := range prevs {
+ if types.Identical(typ, prev.typ) {
+ base.ErrorfAt(pos, "duplicate case %v in type switch\n\tprevious case at %s", typ, base.FmtPos(prev.pos))
+ return
+ }
+ }
+ s.m[ls] = append(prevs, typeSetEntry{pos, typ})
+}
+
+type typeSetEntry struct {
+ pos src.XPos
+ typ *types.Type
+}
+
+func typecheckExprSwitch(n *ir.SwitchStmt) {
+ t := types.Types[types.TBOOL]
+ if n.Tag != nil {
+ n.Tag = Expr(n.Tag)
+ n.Tag = DefaultLit(n.Tag, nil)
+ t = n.Tag.Type()
+ }
+
+ var nilonly string
+ if t != nil {
+ switch {
+ case t.IsMap():
+ nilonly = "map"
+ case t.Kind() == types.TFUNC:
+ nilonly = "func"
+ case t.IsSlice():
+ nilonly = "slice"
+
+ case !types.IsComparable(t):
+ if t.IsStruct() {
+ base.ErrorfAt(n.Pos(), "cannot switch on %L (struct containing %v cannot be compared)", n.Tag, types.IncomparableField(t).Type)
+ } else {
+ base.ErrorfAt(n.Pos(), "cannot switch on %L", n.Tag)
+ }
+ t = nil
+ }
+ }
+
+ var defCase ir.Node
+ var cs constSet
+ for _, ncase := range n.Cases {
+ ncase := ncase.(*ir.CaseStmt)
+ ls := ncase.List
+ if len(ls) == 0 { // default:
+ if defCase != nil {
+ base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
+ } else {
+ defCase = ncase
+ }
+ }
+
+ for i := range ls {
+ ir.SetPos(ncase)
+ ls[i] = Expr(ls[i])
+ ls[i] = DefaultLit(ls[i], t)
+ n1 := ls[i]
+ if t == nil || n1.Type() == nil {
+ continue
+ }
+
+ if nilonly != "" && !ir.IsNil(n1) {
+ base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Tag)
+ } else if t.IsInterface() && !n1.Type().IsInterface() && !types.IsComparable(n1.Type()) {
+ base.ErrorfAt(ncase.Pos(), "invalid case %L in switch (incomparable type)", n1)
+ } else {
+ op1, _ := assignop(n1.Type(), t)
+ op2, _ := assignop(t, n1.Type())
+ if op1 == ir.OXXX && op2 == ir.OXXX {
+ if n.Tag != nil {
+ base.ErrorfAt(ncase.Pos(), "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Tag, n1.Type(), t)
+ } else {
+ base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type())
+ }
+ }
+ }
+
+ // Don't check for duplicate bools. Although the spec allows it,
+ // (1) the compiler hasn't checked it in the past, so compatibility mandates it, and
+ // (2) it would disallow useful things like
+ // case GOARCH == "arm" && GOARM == "5":
+ // case GOARCH == "arm":
+ // which would both evaluate to false for non-ARM compiles.
+ if !n1.Type().IsBoolean() {
+ cs.add(ncase.Pos(), n1, "case", "switch")
+ }
+ }
+
+ Stmts(ncase.Body)
+ }
+}
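+
+// For reference, the nilonly restriction above in user terms (ordinary
+// Go; the commented case is the rejected form):
+//
+// var m, m2 map[string]int
+// switch m {
+// case nil: // ok: a map value may only be compared to nil
+// // case m2: // error: can only compare map m to nil
+// }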
+
+func typecheckTypeSwitch(n *ir.SwitchStmt) {
+ guard := n.Tag.(*ir.TypeSwitchGuard)
+ guard.X = Expr(guard.X)
+ t := guard.X.Type()
+ if t != nil && !t.IsInterface() {
+ base.ErrorfAt(n.Pos(), "cannot type switch on non-interface value %L", guard.X)
+ t = nil
+ }
+
+ // We don't actually declare the type switch's guarded
+ // declaration itself. So if there are no cases, we won't
+ // notice that it went unused.
+ if v := guard.Tag; v != nil && !ir.IsBlank(v) && len(n.Cases) == 0 {
+ base.ErrorfAt(v.Pos(), "%v declared but not used", v.Sym())
+ }
+
+ var defCase, nilCase ir.Node
+ var ts typeSet
+ for _, ncase := range n.Cases {
+ ncase := ncase.(*ir.CaseStmt)
+ ls := ncase.List
+ if len(ls) == 0 { // default:
+ if defCase != nil {
+ base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
+ } else {
+ defCase = ncase
+ }
+ }
+
+ for i := range ls {
+ ls[i] = check(ls[i], ctxExpr|ctxType)
+ n1 := ls[i]
+ if t == nil || n1.Type() == nil {
+ continue
+ }
+
+ var missing, have *types.Field
+ var ptr int
+ if ir.IsNil(n1) { // case nil:
+ if nilCase != nil {
+ base.ErrorfAt(ncase.Pos(), "multiple nil cases in type switch (first at %v)", ir.Line(nilCase))
+ } else {
+ nilCase = ncase
+ }
+ continue
+ }
+ if n1.Op() != ir.OTYPE {
+ base.ErrorfAt(ncase.Pos(), "%L is not a type", n1)
+ continue
+ }
+ if !n1.Type().IsInterface() && !implements(n1.Type(), t, &missing, &have, &ptr) && !missing.Broke() {
+ if have != nil && !have.Broke() {
+ base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
+ " (wrong type for %v method)\n\thave %v%S\n\twant %v%S", guard.X, n1.Type(), missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
+ } else if ptr != 0 {
+ base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
+ " (%v method has pointer receiver)", guard.X, n1.Type(), missing.Sym)
+ } else {
+ base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
+ " (missing %v method)", guard.X, n1.Type(), missing.Sym)
+ }
+ continue
+ }
+
+ ts.add(ncase.Pos(), n1.Type())
+ }
+
+ if len(ncase.Vars) != 0 {
+ // Assign the clause variable's type.
+ vt := t
+ if len(ls) == 1 {
+ if ls[0].Op() == ir.OTYPE {
+ vt = ls[0].Type()
+ } else if !ir.IsNil(ls[0]) {
+ // Invalid single-type case;
+ // mark variable as broken.
+ vt = nil
+ }
+ }
+
+ nvar := ncase.Vars[0]
+ nvar.SetType(vt)
+ if vt != nil {
+ nvar = AssignExpr(nvar)
+ } else {
+ // Clause variable is broken; prevent typechecking.
+ nvar.SetTypecheck(1)
+ nvar.SetWalkdef(1)
+ }
+ ncase.Vars[0] = nvar
+ }
+
+ Stmts(ncase.Body)
+ }
+}
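+
+// For reference, the clause-variable typing above in user terms
+// (ordinary Go): with a single-type case the variable takes that case's
+// type; otherwise it keeps the guard expression's interface type:
+//
+// switch v := x.(type) {
+// case int: // v has type int here
+// case string, bool: // v has the static type of x here
+// case nil: // v has the static type of x here
+// }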
+
+// typecheckswitch typechecks a switch statement.
+func typecheckswitch(n *ir.SwitchStmt) {
+ Stmts(n.Init())
+ if n.Tag != nil && n.Tag.Op() == ir.OTYPESW {
+ typecheckTypeSwitch(n)
+ } else {
+ typecheckExprSwitch(n)
+ }
+}
diff --git a/src/cmd/compile/internal/typecheck/subr.go b/src/cmd/compile/internal/typecheck/subr.go
new file mode 100644
index 0000000000..22ebf2a4b3
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/subr.go
@@ -0,0 +1,793 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typecheck
+
+import (
+ "fmt"
+ "sort"
+ "strconv"
+ "strings"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+ "cmd/internal/src"
+)
+
+func AssignConv(n ir.Node, t *types.Type, context string) ir.Node {
+ return assignconvfn(n, t, func() string { return context })
+}
+
+// DotImportRefs maps idents introduced by importDot back to the
+// ir.PkgName they were dot-imported through.
+var DotImportRefs map[*ir.Ident]*ir.PkgName
+
+// LookupNum looks up the symbol starting with prefix and ending with
+// the decimal n. If prefix is too long, LookupNum panics.
+func LookupNum(prefix string, n int) *types.Sym {
+ var buf [20]byte // plenty long enough for all current users
+ copy(buf[:], prefix)
+ b := strconv.AppendInt(buf[:len(prefix)], int64(n), 10)
+ return types.LocalPkg.LookupBytes(b)
+}
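+
+// A usage sketch: LookupNum(".anon", 0) and LookupNum(".anon", 1) return
+// the symbols ".anon0" and ".anon1" in the local package.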
+
+// NewFuncParams returns a list of function parameter fields built from
+// the given funarg struct list.
+func NewFuncParams(tl *types.Type, mustname bool) []*ir.Field {
+ var args []*ir.Field
+ gen := 0
+ for _, t := range tl.Fields().Slice() {
+ s := t.Sym
+ if mustname && (s == nil || s.Name == "_") {
+ // invent a name so that we can refer to it in the trampoline
+ s = LookupNum(".anon", gen)
+ gen++
+ }
+ a := ir.NewField(base.Pos, s, nil, t.Type)
+ a.Pos = t.Pos
+ a.IsDDD = t.IsDDD()
+ args = append(args, a)
+ }
+
+ return args
+}
+
+// NewName returns a new ONAME Node associated with symbol s.
+func NewName(s *types.Sym) *ir.Name {
+ n := ir.NewNameAt(base.Pos, s)
+ n.Curfn = ir.CurFunc
+ return n
+}
+
+// NodAddr returns a node representing &n at base.Pos.
+func NodAddr(n ir.Node) *ir.AddrExpr {
+ return NodAddrAt(base.Pos, n)
+}
+
+// NodAddrAt returns a node representing &n at position pos.
+func NodAddrAt(pos src.XPos, n ir.Node) *ir.AddrExpr {
+ return ir.NewAddrExpr(pos, n)
+}
+
+func NodNil() ir.Node {
+ n := ir.NewNilExpr(base.Pos)
+ n.SetType(types.Types[types.TNIL])
+ return n
+}
+
+// AddImplicitDots finds the missing fields in the selector n (T.field)
+// that give the shortest unique addressing and modifies the tree by
+// inserting the elided (implicit) type names.
+func AddImplicitDots(n *ir.SelectorExpr) *ir.SelectorExpr {
+ n.X = check(n.X, ctxType|ctxExpr)
+ if n.X.Diag() {
+ n.SetDiag(true)
+ }
+ t := n.X.Type()
+ if t == nil {
+ return n
+ }
+
+ if n.X.Op() == ir.OTYPE {
+ return n
+ }
+
+ s := n.Sel
+ if s == nil {
+ return n
+ }
+
+ switch path, ambig := dotpath(s, t, nil, false); {
+ case path != nil:
+ // rebuild elided dots
+ for c := len(path) - 1; c >= 0; c-- {
+ dot := ir.NewSelectorExpr(base.Pos, ir.ODOT, n.X, path[c].field.Sym)
+ dot.SetImplicit(true)
+ dot.SetType(path[c].field.Type)
+ n.X = dot
+ }
+ case ambig:
+ base.Errorf("ambiguous selector %v", n)
+ n.X = nil
+ }
+
+ return n
+}
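+
+// For reference, the elided dots being rebuilt above, in user terms
+// (ordinary Go):
+//
+// type Inner struct{ F int }
+// type Outer struct{ Inner }
+// var o Outer
+// var _ = o.F // typechecked as o.Inner.F: the implicit ODOT is inserted here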
+
+func CalcMethods(t *types.Type) {
+ if t == nil || t.AllMethods().Len() != 0 {
+ return
+ }
+
+ // mark top-level method symbols
+ // so that expand1 doesn't consider them.
+ for _, f := range t.Methods().Slice() {
+ f.Sym.SetUniq(true)
+ }
+
+ // generate all reachable methods
+ slist = slist[:0]
+ expand1(t, true)
+
+ // check each method to be uniquely reachable
+ var ms []*types.Field
+ for i, sl := range slist {
+ slist[i].field = nil
+ sl.field.Sym.SetUniq(false)
+
+ var f *types.Field
+ path, _ := dotpath(sl.field.Sym, t, &f, false)
+ if path == nil {
+ continue
+ }
+
+ // dotpath may have dug out arbitrary fields, we only want methods.
+ if !f.IsMethod() {
+ continue
+ }
+
+ // add it to the base type method list
+ f = f.Copy()
+ f.Embedded = 1 // needs a trampoline
+ for _, d := range path {
+ if d.field.Type.IsPtr() {
+ f.Embedded = 2
+ break
+ }
+ }
+ ms = append(ms, f)
+ }
+
+ for _, f := range t.Methods().Slice() {
+ f.Sym.SetUniq(false)
+ }
+
+ ms = append(ms, t.Methods().Slice()...)
+ sort.Sort(types.MethodsByName(ms))
+ t.AllMethods().Set(ms)
+}
+
+// adddot1 returns the number of fields or methods named s at depth d in Type t.
+// If exactly one exists, it will be returned in *save (if save is not nil),
+// and dotlist will contain the path of embedded fields traversed to find it,
+// in reverse order. If none exist, more will indicate whether t contains any
+// embedded fields at depth d, so callers can decide whether to retry at
+// a greater depth.
+func adddot1(s *types.Sym, t *types.Type, d int, save **types.Field, ignorecase bool) (c int, more bool) {
+ if t.Recur() {
+ return
+ }
+ t.SetRecur(true)
+ defer t.SetRecur(false)
+
+ var u *types.Type
+ d--
+ if d < 0 {
+ // We've reached our target depth. If t has any fields/methods
+ // named s, then we're done. Otherwise, we still need to check
+ // below for embedded fields.
+ c = lookdot0(s, t, save, ignorecase)
+ if c != 0 {
+ return c, false
+ }
+ }
+
+ u = t
+ if u.IsPtr() {
+ u = u.Elem()
+ }
+ if !u.IsStruct() && !u.IsInterface() {
+ return c, false
+ }
+
+ for _, f := range u.Fields().Slice() {
+ if f.Embedded == 0 || f.Sym == nil {
+ continue
+ }
+ if d < 0 {
+ // Found an embedded field at target depth.
+ return c, true
+ }
+ a, more1 := adddot1(s, f.Type, d, save, ignorecase)
+ if a != 0 && c == 0 {
+ dotlist[d].field = f
+ }
+ c += a
+ if more1 {
+ more = true
+ }
+ }
+
+ return c, more
+}
+
+// dotlist is used by adddot1 to record the path of embedded fields
+// used to access a target field or method.
+// Must be non-nil so that dotpath returns a non-nil slice even if d is zero.
+var dotlist = make([]dlist, 10)
+
+// Convert node n for assignment to type t.
+func assignconvfn(n ir.Node, t *types.Type, context func() string) ir.Node {
+ if n == nil || n.Type() == nil || n.Type().Broke() {
+ return n
+ }
+
+ if t.Kind() == types.TBLANK && n.Type().Kind() == types.TNIL {
+ base.Errorf("use of untyped nil")
+ }
+
+ n = convlit1(n, t, false, context)
+ if n.Type() == nil {
+ return n
+ }
+ if t.Kind() == types.TBLANK {
+ return n
+ }
+
+ // Convert ideal bool from comparison to plain bool
+ // if the next step is non-bool (like interface{}).
+ if n.Type() == types.UntypedBool && !t.IsBoolean() {
+ if n.Op() == ir.ONAME || n.Op() == ir.OLITERAL {
+ r := ir.NewConvExpr(base.Pos, ir.OCONVNOP, nil, n)
+ r.SetType(types.Types[types.TBOOL])
+ r.SetTypecheck(1)
+ r.SetImplicit(true)
+ n = r
+ }
+ }
+
+ if types.Identical(n.Type(), t) {
+ return n
+ }
+
+ op, why := assignop(n.Type(), t)
+ if op == ir.OXXX {
+ base.Errorf("cannot use %L as type %v in %s%s", n, t, context(), why)
+ op = ir.OCONV
+ }
+
+ r := ir.NewConvExpr(base.Pos, op, t, n)
+ r.SetTypecheck(1)
+ r.SetImplicit(true)
+ return r
+}
+
+// assignop reports whether a value of type src is assignment compatible
+// with type dst. If so, it returns the op code to use in the conversion.
+// If not, it returns OXXX. In this case, the string return parameter may
+// hold a reason why. In all other cases, it'll be the empty string.
+func assignop(src, dst *types.Type) (ir.Op, string) {
+ if src == dst {
+ return ir.OCONVNOP, ""
+ }
+ if src == nil || dst == nil || src.Kind() == types.TFORW || dst.Kind() == types.TFORW || src.Underlying() == nil || dst.Underlying() == nil {
+ return ir.OXXX, ""
+ }
+
+ // 1. src type is identical to dst.
+ if types.Identical(src, dst) {
+ return ir.OCONVNOP, ""
+ }
+
+ // 2. src and dst have identical underlying types
+ // and either src or dst is not a named type or
+ // both are empty interface types.
+ // For assignable but different non-empty interface types,
+ // we want to recompute the itab. Recomputing the itab ensures
+ // that itabs are unique (thus an interface with a compile-time
+ // type I has an itab with interface type I).
+ if types.Identical(src.Underlying(), dst.Underlying()) {
+ if src.IsEmptyInterface() {
+ // Conversion between two empty interfaces
+ // requires no code.
+ return ir.OCONVNOP, ""
+ }
+ if (src.Sym() == nil || dst.Sym() == nil) && !src.IsInterface() {
+ // Conversion between two types, at least one unnamed,
+ // needs no conversion. The exception is nonempty interfaces
+ // which need to have their itab updated.
+ return ir.OCONVNOP, ""
+ }
+ }
+
+ // 3. dst is an interface type and src implements dst.
+ if dst.IsInterface() && src.Kind() != types.TNIL {
+ var missing, have *types.Field
+ var ptr int
+ if implements(src, dst, &missing, &have, &ptr) {
+ // Call itabname so that (src, dst)
+ // gets added to itabs early, which allows
+ // us to de-virtualize calls through this
+ // type/interface pair later. See peekitabs in reflect.go
+ if types.IsDirectIface(src) && !dst.IsEmptyInterface() {
+ NeedITab(src, dst)
+ }
+
+ return ir.OCONVIFACE, ""
+ }
+
+ // we'll have complained about this method anyway, suppress spurious messages.
+ if have != nil && have.Sym == missing.Sym && (have.Type.Broke() || missing.Type.Broke()) {
+ return ir.OCONVIFACE, ""
+ }
+
+ var why string
+ if isptrto(src, types.TINTER) {
+ why = fmt.Sprintf(":\n\t%v is pointer to interface, not interface", src)
+ } else if have != nil && have.Sym == missing.Sym && have.Nointerface() {
+ why = fmt.Sprintf(":\n\t%v does not implement %v (%v method is marked 'nointerface')", src, dst, missing.Sym)
+ } else if have != nil && have.Sym == missing.Sym {
+ why = fmt.Sprintf(":\n\t%v does not implement %v (wrong type for %v method)\n"+
+ "\t\thave %v%S\n\t\twant %v%S", src, dst, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
+ } else if ptr != 0 {
+ why = fmt.Sprintf(":\n\t%v does not implement %v (%v method has pointer receiver)", src, dst, missing.Sym)
+ } else if have != nil {
+ why = fmt.Sprintf(":\n\t%v does not implement %v (missing %v method)\n"+
+ "\t\thave %v%S\n\t\twant %v%S", src, dst, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
+ } else {
+ why = fmt.Sprintf(":\n\t%v does not implement %v (missing %v method)", src, dst, missing.Sym)
+ }
+
+ return ir.OXXX, why
+ }
+
+ if isptrto(dst, types.TINTER) {
+ why := fmt.Sprintf(":\n\t%v is pointer to interface, not interface", dst)
+ return ir.OXXX, why
+ }
+
+ if src.IsInterface() && dst.Kind() != types.TBLANK {
+ var missing, have *types.Field
+ var ptr int
+ var why string
+ if implements(dst, src, &missing, &have, &ptr) {
+ why = ": need type assertion"
+ }
+ return ir.OXXX, why
+ }
+
+ // 4. src is a bidirectional channel value, dst is a channel type,
+ // src and dst have identical element types, and
+ // either src or dst is not a named type.
+ if src.IsChan() && src.ChanDir() == types.Cboth && dst.IsChan() {
+ if types.Identical(src.Elem(), dst.Elem()) && (src.Sym() == nil || dst.Sym() == nil) {
+ return ir.OCONVNOP, ""
+ }
+ }
+
+ // 5. src is the predeclared identifier nil and dst is a nillable type.
+ if src.Kind() == types.TNIL {
+ switch dst.Kind() {
+ case types.TPTR,
+ types.TFUNC,
+ types.TMAP,
+ types.TCHAN,
+ types.TINTER,
+ types.TSLICE:
+ return ir.OCONVNOP, ""
+ }
+ }
+
+ // 6. rule about untyped constants - already converted by defaultlit.
+
+ // 7. Any typed value can be assigned to the blank identifier.
+ if dst.Kind() == types.TBLANK {
+ return ir.OCONVNOP, ""
+ }
+
+ return ir.OXXX, ""
+}
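+
+// For reference, a few of the numbered cases above in user terms
+// (ordinary Go; the ops named are the ones returned above):
+//
+// type MyInts []int
+// var s []int
+// var t MyInts = s // case 2: OCONVNOP (one side unnamed)
+// var r io.Reader = os.Stdin // case 3: OCONVIFACE
+// var c <-chan int = make(chan int) // case 4: OCONVNOP
+// var p *int = nil // case 5: OCONVNOP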
+
+// convertop reports whether a value of type src can be converted to a
+// value of type dst. If so, it returns the op code to use in the
+// conversion (possibly OCONVNOP). If not, it returns OXXX. In this
+// case, the string return parameter may hold a reason why. In all other
+// cases, it'll be the empty string.
+// srcConstant indicates whether the value of type src is a constant.
+func convertop(srcConstant bool, src, dst *types.Type) (ir.Op, string) {
+ if src == dst {
+ return ir.OCONVNOP, ""
+ }
+ if src == nil || dst == nil {
+ return ir.OXXX, ""
+ }
+
+ // Conversions from regular to go:notinheap are not allowed
+ // (unless it's unsafe.Pointer). These are runtime-specific
+ // rules.
+ // (a) Disallow (*T) to (*U) where T is go:notinheap but U isn't.
+ if src.IsPtr() && dst.IsPtr() && dst.Elem().NotInHeap() && !src.Elem().NotInHeap() {
+ why := fmt.Sprintf(":\n\t%v is incomplete (or unallocatable), but %v is not", dst.Elem(), src.Elem())
+ return ir.OXXX, why
+ }
+ // (b) Disallow string to []T where T is go:notinheap.
+ if src.IsString() && dst.IsSlice() && dst.Elem().NotInHeap() && (dst.Elem().Kind() == types.ByteType.Kind() || dst.Elem().Kind() == types.RuneType.Kind()) {
+ why := fmt.Sprintf(":\n\t%v is incomplete (or unallocatable)", dst.Elem())
+ return ir.OXXX, why
+ }
+
+ // 1. src can be assigned to dst.
+ op, why := assignop(src, dst)
+ if op != ir.OXXX {
+ return op, why
+ }
+
+ // The rules for interfaces are no different in conversions
+ // than assignments. If interfaces are involved, stop now
+ // with the good message from assignop.
+ // Otherwise clear the error.
+ if src.IsInterface() || dst.IsInterface() {
+ return ir.OXXX, why
+ }
+
+ // 2. Ignoring struct tags, src and dst have identical underlying types.
+ if types.IdenticalIgnoreTags(src.Underlying(), dst.Underlying()) {
+ return ir.OCONVNOP, ""
+ }
+
+ // 3. src and dst are unnamed pointer types and, ignoring struct tags,
+ // their base types have identical underlying types.
+ if src.IsPtr() && dst.IsPtr() && src.Sym() == nil && dst.Sym() == nil {
+ if types.IdenticalIgnoreTags(src.Elem().Underlying(), dst.Elem().Underlying()) {
+ return ir.OCONVNOP, ""
+ }
+ }
+
+ // 4. src and dst are both integer or floating point types.
+ if (src.IsInteger() || src.IsFloat()) && (dst.IsInteger() || dst.IsFloat()) {
+ if types.SimType[src.Kind()] == types.SimType[dst.Kind()] {
+ return ir.OCONVNOP, ""
+ }
+ return ir.OCONV, ""
+ }
+
+ // 5. src and dst are both complex types.
+ if src.IsComplex() && dst.IsComplex() {
+ if types.SimType[src.Kind()] == types.SimType[dst.Kind()] {
+ return ir.OCONVNOP, ""
+ }
+ return ir.OCONV, ""
+ }
+
+ // Special case for constant conversions: any numeric
+ // conversion is potentially okay. We'll validate further
+ // within evconst. See #38117.
+ if srcConstant && (src.IsInteger() || src.IsFloat() || src.IsComplex()) && (dst.IsInteger() || dst.IsFloat() || dst.IsComplex()) {
+ return ir.OCONV, ""
+ }
+
+ // 6. src is an integer or has type []byte or []rune
+ // and dst is a string type.
+ if src.IsInteger() && dst.IsString() {
+ return ir.ORUNESTR, ""
+ }
+
+ if src.IsSlice() && dst.IsString() {
+ if src.Elem().Kind() == types.ByteType.Kind() {
+ return ir.OBYTES2STR, ""
+ }
+ if src.Elem().Kind() == types.RuneType.Kind() {
+ return ir.ORUNES2STR, ""
+ }
+ }
+
+ // 7. src is a string and dst is []byte or []rune.
+ // String to slice.
+ if src.IsString() && dst.IsSlice() {
+ if dst.Elem().Kind() == types.ByteType.Kind() {
+ return ir.OSTR2BYTES, ""
+ }
+ if dst.Elem().Kind() == types.RuneType.Kind() {
+ return ir.OSTR2RUNES, ""
+ }
+ }
+
+ // 8. src is a pointer or uintptr and dst is unsafe.Pointer.
+ if (src.IsPtr() || src.IsUintptr()) && dst.IsUnsafePtr() {
+ return ir.OCONVNOP, ""
+ }
+
+ // 9. src is unsafe.Pointer and dst is a pointer or uintptr.
+ if src.IsUnsafePtr() && (dst.IsPtr() || dst.IsUintptr()) {
+ return ir.OCONVNOP, ""
+ }
+
+ // src is map and dst is a pointer to corresponding hmap.
+ // This rule is needed for the implementation detail that
+ // go gc maps are implemented as a pointer to a hmap struct.
+ if src.Kind() == types.TMAP && dst.IsPtr() &&
+ src.MapType().Hmap == dst.Elem() {
+ return ir.OCONVNOP, ""
+ }
+
+ return ir.OXXX, ""
+}
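+
+// For reference, the string/slice/integer cases above in user terms
+// (ordinary Go):
+//
+// _ = string(rune(65)) // ORUNESTR: "A"
+// _ = []byte("hi") // OSTR2BYTES
+// _ = string([]byte{104, 105}) // OBYTES2STR
+// _ = []rune("hi") // OSTR2RUNES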
+
+// Code to resolve elided DOTs in embedded types.
+
+// A dlist stores a pointer to a TFIELD Type embedded within
+// a TSTRUCT or TINTER Type.
+type dlist struct {
+ field *types.Field
+}
+
+// dotpath computes the unique shortest explicit selector path to fully qualify
+// a selection expression x.f, where x is of type t and f is the symbol s.
+// If no such path exists, dotpath returns nil.
+// If there are multiple shortest paths to the same depth, ambig is true.
+func dotpath(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool) (path []dlist, ambig bool) {
+ // The embedding of types within structs imposes a tree structure onto
+ // types: structs parent the types they embed, and types parent their
+ // fields or methods. Our goal here is to find the shortest path to
+ // a field or method named s in the subtree rooted at t. To accomplish
+ // that, we iteratively perform depth-first searches of increasing depth
+ // until we either find the named field/method or exhaust the tree.
+ for d := 0; ; d++ {
+ if d > len(dotlist) {
+ dotlist = append(dotlist, dlist{})
+ }
+ if c, more := adddot1(s, t, d, save, ignorecase); c == 1 {
+ return dotlist[:d], false
+ } else if c > 1 {
+ return nil, true
+ } else if !more {
+ return nil, false
+ }
+ }
+}
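+
+// For reference, the ambig case above in user terms: two equally short
+// paths at the same depth make the selector ambiguous (ordinary Go):
+//
+// type A struct{ F int }
+// type B struct{ F int }
+// type C struct{ A; B }
+// var c C
+// // _ = c.F // error: ambiguous selector c.F
+// var _ = c.A.F // ok: an explicit path disambiguates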
+
+func expand0(t *types.Type) {
+ u := t
+ if u.IsPtr() {
+ u = u.Elem()
+ }
+
+ if u.IsInterface() {
+ for _, f := range u.Fields().Slice() {
+ if f.Sym.Uniq() {
+ continue
+ }
+ f.Sym.SetUniq(true)
+ slist = append(slist, symlink{field: f})
+ }
+
+ return
+ }
+
+ u = types.ReceiverBaseType(t)
+ if u != nil {
+ for _, f := range u.Methods().Slice() {
+ if f.Sym.Uniq() {
+ continue
+ }
+ f.Sym.SetUniq(true)
+ slist = append(slist, symlink{field: f})
+ }
+ }
+}
+
+func expand1(t *types.Type, top bool) {
+ if t.Recur() {
+ return
+ }
+ t.SetRecur(true)
+
+ if !top {
+ expand0(t)
+ }
+
+ u := t
+ if u.IsPtr() {
+ u = u.Elem()
+ }
+
+ if u.IsStruct() || u.IsInterface() {
+ for _, f := range u.Fields().Slice() {
+ if f.Embedded == 0 {
+ continue
+ }
+ if f.Sym == nil {
+ continue
+ }
+ expand1(f.Type, false)
+ }
+ }
+
+ t.SetRecur(false)
+}
+
+// ifacelookdot looks up the method s in type t, following embedded
+// fields. It reports an error if s is ambiguous or names a field rather
+// than a method. followptr reports whether the path to the method
+// traverses an embedded pointer field.
+func ifacelookdot(s *types.Sym, t *types.Type, ignorecase bool) (m *types.Field, followptr bool) {
+ if t == nil {
+ return nil, false
+ }
+
+ path, ambig := dotpath(s, t, &m, ignorecase)
+ if path == nil {
+ if ambig {
+ base.Errorf("%v.%v is ambiguous", t, s)
+ }
+ return nil, false
+ }
+
+ for _, d := range path {
+ if d.field.Type.IsPtr() {
+ followptr = true
+ break
+ }
+ }
+
+ if !m.IsMethod() {
+ base.Errorf("%v.%v is a field, not a method", t, s)
+ return nil, followptr
+ }
+
+ return m, followptr
+}
+
+// implements reports whether type t implements the interface iface.
+// On failure it fills in diagnostics: *m is the interface method that is
+// missing or mismatched, *samename is a method of t with the same name
+// but the wrong type (if any), and *ptr is set to 1 when the method
+// exists only with a pointer receiver.
+func implements(t, iface *types.Type, m, samename **types.Field, ptr *int) bool {
+ t0 := t
+ if t == nil {
+ return false
+ }
+
+ if t.IsInterface() {
+ i := 0
+ tms := t.Fields().Slice()
+ for _, im := range iface.Fields().Slice() {
+ for i < len(tms) && tms[i].Sym != im.Sym {
+ i++
+ }
+ if i == len(tms) {
+ *m = im
+ *samename = nil
+ *ptr = 0
+ return false
+ }
+ tm := tms[i]
+ if !types.Identical(tm.Type, im.Type) {
+ *m = im
+ *samename = tm
+ *ptr = 0
+ return false
+ }
+ }
+
+ return true
+ }
+
+ t = types.ReceiverBaseType(t)
+ var tms []*types.Field
+ if t != nil {
+ CalcMethods(t)
+ tms = t.AllMethods().Slice()
+ }
+ i := 0
+ for _, im := range iface.Fields().Slice() {
+ if im.Broke() {
+ continue
+ }
+ for i < len(tms) && tms[i].Sym != im.Sym {
+ i++
+ }
+ if i == len(tms) {
+ *m = im
+ *samename, _ = ifacelookdot(im.Sym, t, true)
+ *ptr = 0
+ return false
+ }
+ tm := tms[i]
+ if tm.Nointerface() || !types.Identical(tm.Type, im.Type) {
+ *m = im
+ *samename = tm
+ *ptr = 0
+ return false
+ }
+ followptr := tm.Embedded == 2
+
+ // if pointer receiver in method,
+ // the method does not exist for value types.
+ rcvr := tm.Type.Recv().Type
+ if rcvr.IsPtr() && !t0.IsPtr() && !followptr && !types.IsInterfaceMethod(tm.Type) {
+ if false && base.Flag.LowerR != 0 {
+ base.Errorf("interface pointer mismatch")
+ }
+
+ *m = im
+ *samename = nil
+ *ptr = 1
+ return false
+ }
+ }
+
+ return true
+}
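+
+// For reference, the *ptr = 1 case above in user terms (ordinary Go):
+//
+// type T struct{}
+// func (*T) M() {}
+// type I interface{ M() }
+// var _ I = &T{} // ok: *T's method set includes M
+// // var _ I = T{} // error: M method has pointer receiver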
+
+func isptrto(t *types.Type, et types.Kind) bool {
+ if t == nil {
+ return false
+ }
+ if !t.IsPtr() {
+ return false
+ }
+ t = t.Elem()
+ if t == nil {
+ return false
+ }
+ if t.Kind() != et {
+ return false
+ }
+ return true
+}
+
+// lookdot0 returns the number of fields or methods named s associated
+// with Type t. If exactly one exists, it will be returned in *save
+// (if save is not nil).
+func lookdot0(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool) int {
+ u := t
+ if u.IsPtr() {
+ u = u.Elem()
+ }
+
+ c := 0
+ if u.IsStruct() || u.IsInterface() {
+ for _, f := range u.Fields().Slice() {
+ if f.Sym == s || (ignorecase && f.IsMethod() && strings.EqualFold(f.Sym.Name, s.Name)) {
+ if save != nil {
+ *save = f
+ }
+ c++
+ }
+ }
+ }
+
+ u = t
+ if t.Sym() != nil && t.IsPtr() && !t.Elem().IsPtr() {
+ // If t is a defined pointer type, then x.m is shorthand for (*x).m.
+ u = t.Elem()
+ }
+ u = types.ReceiverBaseType(u)
+ if u != nil {
+ for _, f := range u.Methods().Slice() {
+ if f.Embedded == 0 && (f.Sym == s || (ignorecase && strings.EqualFold(f.Sym.Name, s.Name))) {
+ if save != nil {
+ *save = f
+ }
+ c++
+ }
+ }
+ }
+
+ return c
+}
+
+var slist []symlink
+
+// Code to help generate trampoline functions for methods on embedded
+// types. These are approximately the same as the corresponding adddot
+// routines, except that they expect to be called with unique tasks and
+// they return the actual methods.
+
+type symlink struct {
+ field *types.Field
+}
diff --git a/src/cmd/compile/internal/typecheck/syms.go b/src/cmd/compile/internal/typecheck/syms.go
new file mode 100644
index 0000000000..ab3384bf90
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/syms.go
@@ -0,0 +1,104 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typecheck
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+ "cmd/internal/obj"
+ "cmd/internal/src"
+)
+
+func LookupRuntime(name string) *ir.Name {
+ s := ir.Pkgs.Runtime.Lookup(name)
+ if s == nil || s.Def == nil {
+ base.Fatalf("syslook: can't find runtime.%s", name)
+ }
+ return ir.AsNode(s.Def).(*ir.Name)
+}
+
+// SubstArgTypes substitutes the given list of types for
+// successive occurrences of the "any" placeholder in the
+// type syntax expression n.Type.
+// The result of SubstArgTypes MUST be assigned back to old, e.g.
+// n.Left = SubstArgTypes(n.Left, t1, t2)
+func SubstArgTypes(old *ir.Name, types_ ...*types.Type) *ir.Name {
+ n := old.CloneName()
+
+ for _, t := range types_ {
+ types.CalcSize(t)
+ }
+ n.SetType(types.SubstAny(n.Type(), &types_))
+ if len(types_) > 0 {
+ base.Fatalf("substArgTypes: too many argument types")
+ }
+ return n
+}
+
+// AutoLabel generates a new Name node for use with
+// an automatically generated label.
+// prefix is a short mnemonic (e.g. ".s" for switch)
+// to help with debugging.
+// It should begin with "." to avoid conflicts with
+// user labels.
+func AutoLabel(prefix string) *types.Sym {
+ if prefix[0] != '.' {
+ base.Fatalf("autolabel prefix must start with '.', have %q", prefix)
+ }
+ fn := ir.CurFunc
+ if ir.CurFunc == nil {
+ base.Fatalf("autolabel outside function")
+ }
+ n := fn.Label
+ fn.Label++
+ return LookupNum(prefix, int(n))
+}
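+
+// A usage sketch: within a function, successive calls such as
+// AutoLabel(".s") return the symbols ".s0", ".s1", and so on, combining
+// the prefix with the function's label counter via LookupNum.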
+
+func Lookup(name string) *types.Sym {
+ return types.LocalPkg.Lookup(name)
+}
+
+// loadsys loads the definitions for the low-level runtime functions,
+// so that the compiler can generate calls to them,
+// but does not make them visible to user code.
+func loadsys() {
+ types.Block = 1
+
+ inimport = true
+ TypecheckAllowed = true
+
+ typs := runtimeTypes()
+ for _, d := range &runtimeDecls {
+ sym := ir.Pkgs.Runtime.Lookup(d.name)
+ typ := typs[d.typ]
+ switch d.tag {
+ case funcTag:
+ importfunc(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
+ case varTag:
+ importvar(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
+ default:
+ base.Fatalf("unhandled declaration tag %v", d.tag)
+ }
+ }
+
+ TypecheckAllowed = false
+ inimport = false
+}
+
+// LookupRuntimeFunc looks up a Go function by name in package runtime.
+// Such a function must follow the internal calling convention.
+func LookupRuntimeFunc(name string) *obj.LSym {
+ s := ir.Pkgs.Runtime.Lookup(name)
+ s.SetFunc(true)
+ return s.Linksym()
+}
+
+// LookupRuntimeVar looks up a variable (or assembly function) name in package
+// runtime. If this is a function, it may have a special calling
+// convention.
+func LookupRuntimeVar(name string) *obj.LSym {
+ return ir.Pkgs.Runtime.Lookup(name).Linksym()
+}
diff --git a/src/cmd/compile/internal/typecheck/target.go b/src/cmd/compile/internal/typecheck/target.go
new file mode 100644
index 0000000000..018614d68b
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/target.go
@@ -0,0 +1,12 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:generate go run mkbuiltin.go
+
+package typecheck
+
+import "cmd/compile/internal/ir"
+
+// Target is the package being compiled.
+var Target *ir.Package
diff --git a/src/cmd/compile/internal/typecheck/typecheck.go b/src/cmd/compile/internal/typecheck/typecheck.go
new file mode 100644
index 0000000000..2abf0a7824
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/typecheck.go
@@ -0,0 +1,4180 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typecheck
+
+import (
+ "fmt"
+ "go/constant"
+ "go/token"
+ "strings"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+)
+
+// InitTodoFunc is a function into which autotmps generated during
+// typechecking are collected, to be included in the package-level init
+// function.
+var InitTodoFunc = ir.NewFunc(base.Pos)
+
+var inimport bool // set during import
+
+var decldepth int32
+
+var TypecheckAllowed bool
+
+var (
+ NeedFuncSym = func(*types.Sym) {}
+ NeedITab = func(t, itype *types.Type) {}
+ NeedRuntimeType = func(*types.Type) {}
+)
+
+func Init() {
+ initUniverse()
+ DeclContext = ir.PEXTERN
+ base.Timer.Start("fe", "loadsys")
+ loadsys()
+}
+
+func Package() {
+ declareUniverse()
+
+ TypecheckAllowed = true
+
+ // Process top-level declarations in phases.
+
+ // Phase 1: const, type, and names and types of funcs.
+ // This will gather all the information about types
+ // and methods but doesn't depend on any of it.
+ //
+ // We also defer type alias declarations until phase 2
+ // to avoid cycles like #18640.
+ // TODO(gri) Remove this again once we have a fix for #25838.
+
+ // Don't use range--typecheck can add closures to Target.Decls.
+ base.Timer.Start("fe", "typecheck", "top1")
+ for i := 0; i < len(Target.Decls); i++ {
+ n := Target.Decls[i]
+ if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.(*ir.Decl).X.Name().Alias()) {
+ Target.Decls[i] = Stmt(n)
+ }
+ }
+
+ // Phase 2: Variable assignments.
+ // To check interface assignments, depends on phase 1.
+
+ // Don't use range--typecheck can add closures to Target.Decls.
+ base.Timer.Start("fe", "typecheck", "top2")
+ for i := 0; i < len(Target.Decls); i++ {
+ n := Target.Decls[i]
+ if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).X.Name().Alias() {
+ Target.Decls[i] = Stmt(n)
+ }
+ }
+
+ // Phase 3: Type check function bodies.
+ // Don't use range--typecheck can add closures to Target.Decls.
+ base.Timer.Start("fe", "typecheck", "func")
+ var fcount int64
+ for i := 0; i < len(Target.Decls); i++ {
+ n := Target.Decls[i]
+ if n.Op() == ir.ODCLFUNC {
+ FuncBody(n.(*ir.Func))
+ fcount++
+ }
+ }
+
+ // Phase 4: Check external declarations.
+ // TODO(mdempsky): This should be handled when type checking their
+ // corresponding ODCL nodes.
+ base.Timer.Start("fe", "typecheck", "externdcls")
+ for i, n := range Target.Externs {
+ if n.Op() == ir.ONAME {
+ Target.Externs[i] = Expr(Target.Externs[i])
+ }
+ }
+
+ // Phase 5: With all user code type-checked, it's now safe to verify map keys.
+ CheckMapKeys()
+
+ // Phase 6: Decide how to capture closed variables.
+ // This needs to run before escape analysis,
+ // because variables captured by value do not escape.
+ base.Timer.Start("fe", "capturevars")
+ for _, n := range Target.Decls {
+ if n.Op() == ir.ODCLFUNC {
+ n := n.(*ir.Func)
+ if n.OClosure != nil {
+ ir.CurFunc = n
+ CaptureVars(n)
+ }
+ }
+ }
+ CaptureVarsComplete = true
+ ir.CurFunc = nil
+
+ if base.Debug.TypecheckInl != 0 {
+ // Typecheck imported function bodies if Debug.l > 1,
+ // otherwise lazily when used or re-exported.
+ AllImportedBodies()
+ }
+}
+
+func AssignExpr(n ir.Node) ir.Node { return check(n, ctxExpr|ctxAssign) }
+func Expr(n ir.Node) ir.Node { return check(n, ctxExpr) }
+func Stmt(n ir.Node) ir.Node { return check(n, ctxStmt) }
+
+func Exprs(exprs []ir.Node) { typecheckslice(exprs, ctxExpr) }
+func Stmts(stmts []ir.Node) { typecheckslice(stmts, ctxStmt) }
+
+func Call(call *ir.CallExpr) {
+ t := call.X.Type()
+ if t == nil {
+ panic("misuse of Call")
+ }
+ ctx := ctxStmt
+ if t.NumResults() > 0 {
+ ctx = ctxExpr | ctxMultiOK
+ }
+ if check(call, ctx) != call {
+ panic("bad typecheck")
+ }
+}
+
+func Callee(n ir.Node) ir.Node {
+ return check(n, ctxExpr|ctxCallee)
+}
+
+func FuncBody(n *ir.Func) {
+ ir.CurFunc = n
+ decldepth = 1
+ errorsBefore := base.Errors()
+ Stmts(n.Body)
+ CheckReturn(n)
+ if base.Errors() > errorsBefore {
+ n.Body.Set(nil) // type errors; do not compile
+ }
+ // Now that we've checked whether n terminates,
+ // we can eliminate some obviously dead code.
+ deadcode(n)
+}
+
+var importlist []*ir.Func
+
+func AllImportedBodies() {
+ for _, n := range importlist {
+ if n.Inl != nil {
+ ImportedBody(n)
+ }
+ }
+}
+
+var traceIndent []byte
+
+func tracePrint(title string, n ir.Node) func(np *ir.Node) {
+ indent := traceIndent
+
+ // guard against nil
+ var pos, op string
+ var tc uint8
+ if n != nil {
+ pos = base.FmtPos(n.Pos())
+ op = n.Op().String()
+ tc = n.Typecheck()
+ }
+
+ types.SkipSizeForTracing = true
+ defer func() { types.SkipSizeForTracing = false }()
+ fmt.Printf("%s: %s%s %p %s %v tc=%d\n", pos, indent, title, n, op, n, tc)
+ traceIndent = append(traceIndent, ". "...)
+
+ return func(np *ir.Node) {
+ traceIndent = traceIndent[:len(traceIndent)-2]
+
+ // if we have a result, use that
+ if np != nil {
+ n = *np
+ }
+
+ // guard against nil
+ // use outer pos, op so we don't get empty pos/op if n == nil (nicer output)
+ var tc uint8
+ var typ *types.Type
+ if n != nil {
+ pos = base.FmtPos(n.Pos())
+ op = n.Op().String()
+ tc = n.Typecheck()
+ typ = n.Type()
+ }
+
+ types.SkipSizeForTracing = true
+ defer func() { types.SkipSizeForTracing = false }()
+ fmt.Printf("%s: %s=> %p %s %v tc=%d type=%L\n", pos, indent, n, op, n, tc, typ)
+ }
+}
+
+const (
+ ctxStmt = 1 << iota // evaluated at statement level
+ ctxExpr // evaluated in value context
+ ctxType // evaluated in type context
+ ctxCallee // call-only expressions are ok
+ ctxMultiOK // multivalue function returns are ok
+ ctxAssign // assigning to expression
+)
+
+// Typechecking processes the whole tree of an expression: it calculates
+// expression types, evaluates compile-time constants, marks variables
+// that escape the local frame, and rewrites n.Op to be more specific in
+// some cases.
+
+var typecheckdefstack []ir.Node
+
+// Resolve resolves an ONONAME node to its definition, if any.
+func Resolve(n ir.Node) (res ir.Node) {
+ if n == nil || n.Op() != ir.ONONAME {
+ return n
+ }
+
+ // only trace if there's work to do
+ if base.EnableTrace && base.Flag.LowerT {
+ defer tracePrint("resolve", n)(&res)
+ }
+
+ if sym := n.Sym(); sym.Pkg != types.LocalPkg {
+ // We might have an ir.Ident from oldname or importDot.
+ if id, ok := n.(*ir.Ident); ok {
+ if pkgName := DotImportRefs[id]; pkgName != nil {
+ pkgName.Used = true
+ }
+ }
+
+ if inimport {
+ base.Fatalf("recursive inimport")
+ }
+ inimport = true
+ n = expandDecl(n)
+ inimport = false
+ return n
+ }
+
+ r := ir.AsNode(n.Sym().Def)
+ if r == nil {
+ return n
+ }
+
+ if r.Op() == ir.OIOTA {
+ if x := getIotaValue(); x >= 0 {
+ return ir.NewInt(x)
+ }
+ return n
+ }
+
+ return r
+}
+
+func typecheckslice(l []ir.Node, top int) {
+ for i := range l {
+ l[i] = check(l[i], top)
+ }
+}
+
+var _typekind = []string{
+ types.TINT: "int",
+ types.TUINT: "uint",
+ types.TINT8: "int8",
+ types.TUINT8: "uint8",
+ types.TINT16: "int16",
+ types.TUINT16: "uint16",
+ types.TINT32: "int32",
+ types.TUINT32: "uint32",
+ types.TINT64: "int64",
+ types.TUINT64: "uint64",
+ types.TUINTPTR: "uintptr",
+ types.TCOMPLEX64: "complex64",
+ types.TCOMPLEX128: "complex128",
+ types.TFLOAT32: "float32",
+ types.TFLOAT64: "float64",
+ types.TBOOL: "bool",
+ types.TSTRING: "string",
+ types.TPTR: "pointer",
+ types.TUNSAFEPTR: "unsafe.Pointer",
+ types.TSTRUCT: "struct",
+ types.TINTER: "interface",
+ types.TCHAN: "chan",
+ types.TMAP: "map",
+ types.TARRAY: "array",
+ types.TSLICE: "slice",
+ types.TFUNC: "func",
+ types.TNIL: "nil",
+ types.TIDEAL: "untyped number",
+}
+
+func typekind(t *types.Type) string {
+ if t.IsUntyped() {
+ return fmt.Sprintf("%v", t)
+ }
+ et := t.Kind()
+ if int(et) < len(_typekind) {
+ s := _typekind[et]
+ if s != "" {
+ return s
+ }
+ }
+ return fmt.Sprintf("etype=%d", et)
+}
+
+func cycleFor(start ir.Node) []ir.Node {
+ // Find the start node in typecheck_tcstack.
+ // We know that it must exist because each time we mark
+ // a node with n.SetTypecheck(2) we push it on the stack,
+ // and each time we mark a node with n.SetTypecheck(1) we
+ // pop it from the stack. We hit a cycle when we encounter
+ // a node marked 2, in which case it must be on the stack.
+ i := len(typecheck_tcstack) - 1
+ for i > 0 && typecheck_tcstack[i] != start {
+ i--
+ }
+
+ // collect all nodes with same Op
+ var cycle []ir.Node
+ for _, n := range typecheck_tcstack[i:] {
+ if n.Op() == start.Op() {
+ cycle = append(cycle, n)
+ }
+ }
+
+ return cycle
+}
+
+func cycleTrace(cycle []ir.Node) string {
+ var s string
+ for i, n := range cycle {
+ s += fmt.Sprintf("\n\t%v: %v uses %v", ir.Line(n), n, cycle[(i+1)%len(cycle)])
+ }
+ return s
+}
+
+var typecheck_tcstack []ir.Node
+
+func Func(fn *ir.Func) {
+ new := Stmt(fn)
+ if new != fn {
+ base.Fatalf("typecheck changed func")
+ }
+}
+
+func typecheckNtype(n ir.Ntype) ir.Ntype {
+ return check(n, ctxType).(ir.Ntype)
+}
+
+// check type checks node n.
+// The result of check MUST be assigned back to n, e.g.
+// n.Left = check(n.Left, top)
+func check(n ir.Node, top int) (res ir.Node) {
+ // cannot type check until all the source has been parsed
+ if !TypecheckAllowed {
+ base.Fatalf("early typecheck")
+ }
+
+ if n == nil {
+ return nil
+ }
+
+ // only trace if there's work to do
+ if base.EnableTrace && base.Flag.LowerT {
+ defer tracePrint("typecheck", n)(&res)
+ }
+
+ lno := ir.SetPos(n)
+
+ // Skip over parens.
+ for n.Op() == ir.OPAREN {
+ n = n.(*ir.ParenExpr).X
+ }
+
+ // Resolve definition of name and value of iota lazily.
+ n = Resolve(n)
+
+ // Skip typecheck if already done.
+ // But re-typecheck ONAME/OTYPE/OLITERAL/OPACK node in case context has changed.
+ if n.Typecheck() == 1 {
+ switch n.Op() {
+ case ir.ONAME, ir.OTYPE, ir.OLITERAL, ir.OPACK:
+ break
+
+ default:
+ base.Pos = lno
+ return n
+ }
+ }
+
+ if n.Typecheck() == 2 {
+ // Typechecking loop. Try printing a meaningful message;
+ // otherwise fall back to a stack trace of the typechecking.
+ switch n.Op() {
+ // We can already diagnose variables used as types.
+ case ir.ONAME:
+ n := n.(*ir.Name)
+ if top&(ctxExpr|ctxType) == ctxType {
+ base.Errorf("%v is not a type", n)
+ }
+
+ case ir.OTYPE:
+ // Only report a type cycle if we are expecting a type.
+ // Otherwise let other code report an error.
+ if top&ctxType == ctxType {
+ // A cycle containing only alias types is an error
+ // since it would expand indefinitely when aliases
+ // are substituted.
+ cycle := cycleFor(n)
+ for _, n1 := range cycle {
+ if n1.Name() != nil && !n1.Name().Alias() {
+ // Cycle is ok. But if n is an alias type and doesn't
+ // have a type yet, we have a recursive type declaration
+ // with aliases that we can't handle properly yet.
+ // Report an error rather than crashing later.
+ if n.Name() != nil && n.Name().Alias() && n.Type() == nil {
+ base.Pos = n.Pos()
+ base.Fatalf("cannot handle alias type declaration (issue #25838): %v", n)
+ }
+ base.Pos = lno
+ return n
+ }
+ }
+ base.ErrorfAt(n.Pos(), "invalid recursive type alias %v%s", n, cycleTrace(cycle))
+ }
+
+ case ir.OLITERAL:
+ if top&(ctxExpr|ctxType) == ctxType {
+ base.Errorf("%v is not a type", n)
+ break
+ }
+ base.ErrorfAt(n.Pos(), "constant definition loop%s", cycleTrace(cycleFor(n)))
+ }
+
+ if base.Errors() == 0 {
+ var trace string
+ for i := len(typecheck_tcstack) - 1; i >= 0; i-- {
+ x := typecheck_tcstack[i]
+ trace += fmt.Sprintf("\n\t%v %v", ir.Line(x), x)
+ }
+ base.Errorf("typechecking loop involving %v%s", n, trace)
+ }
+
+ base.Pos = lno
+ return n
+ }
+
+ typecheck_tcstack = append(typecheck_tcstack, n)
+
+ n.SetTypecheck(2)
+ n = typecheck1(n, top)
+ n.SetTypecheck(1)
+
+ last := len(typecheck_tcstack) - 1
+ typecheck_tcstack[last] = nil
+ typecheck_tcstack = typecheck_tcstack[:last]
+
+ _, isExpr := n.(ir.Expr)
+ _, isStmt := n.(ir.Stmt)
+ isMulti := false
+ switch n.Op() {
+ case ir.OCALLFUNC, ir.OCALLINTER, ir.OCALLMETH:
+ n := n.(*ir.CallExpr)
+ if t := n.X.Type(); t != nil && t.Kind() == types.TFUNC {
+ nr := t.NumResults()
+ isMulti = nr > 1
+ if nr == 0 {
+ isExpr = false
+ }
+ }
+ case ir.OAPPEND:
+ // Must be used (and not BinaryExpr/UnaryExpr).
+ isStmt = false
+ case ir.OCLOSE, ir.ODELETE, ir.OPANIC, ir.OPRINT, ir.OPRINTN, ir.OVARKILL, ir.OVARLIVE:
+ // Must not be used.
+ isExpr = false
+ isStmt = true
+ case ir.OCOPY, ir.ORECOVER, ir.ORECV:
+ // Can be used or not.
+ isStmt = true
+ }
+
+ t := n.Type()
+ if t != nil && !t.IsFuncArgStruct() && n.Op() != ir.OTYPE {
+ switch t.Kind() {
+ case types.TFUNC, // might have TANY; wait until it's called
+ types.TANY, types.TFORW, types.TIDEAL, types.TNIL, types.TBLANK:
+ break
+
+ default:
+ types.CheckSize(t)
+ }
+ }
+ if t != nil {
+ n = EvalConst(n)
+ t = n.Type()
+ }
+
+ // TODO(rsc): Lots of the complexity here is because typecheck can
+ // see OTYPE, ONAME, and OLITERAL nodes multiple times.
+ // Once we make the IR a proper tree, we should be able to simplify
+ // this code a bit, especially the final case.
+ switch {
+ case top&(ctxStmt|ctxExpr) == ctxExpr && !isExpr && n.Op() != ir.OTYPE && !isMulti:
+ if !n.Diag() {
+ base.Errorf("%v used as value", n)
+ n.SetDiag(true)
+ }
+ if t != nil {
+ n.SetType(nil)
+ }
+
+ case top&ctxType == 0 && n.Op() == ir.OTYPE && t != nil:
+ if !n.Type().Broke() {
+ base.Errorf("type %v is not an expression", n.Type())
+ }
+ n.SetType(nil)
+
+ case top&(ctxStmt|ctxExpr) == ctxStmt && !isStmt && t != nil:
+ if !n.Diag() {
+ base.Errorf("%v evaluated but not used", n)
+ n.SetDiag(true)
+ }
+ n.SetType(nil)
+
+ case top&(ctxType|ctxExpr) == ctxType && n.Op() != ir.OTYPE && n.Op() != ir.ONONAME && (t != nil || n.Op() == ir.ONAME):
+ base.Errorf("%v is not a type", n)
+ if t != nil {
+ n.SetType(nil)
+ }
+
+ }
+
+ base.Pos = lno
+ return n
+}
+
+// indexlit implements typechecking of untyped values as
+// array/slice indexes. It is almost equivalent to DefaultLit
+// but also accepts untyped numeric values representable as
+// a value of type int (see also checkmake for comparison).
+// The result of indexlit MUST be assigned back to n, e.g.
+// n.Left = indexlit(n.Left)
+func indexlit(n ir.Node) ir.Node {
+ if n != nil && n.Type() != nil && n.Type().Kind() == types.TIDEAL {
+ return DefaultLit(n, types.Types[types.TINT])
+ }
+ return n
+}
+
+// typecheck1 should ONLY be called from check.
+func typecheck1(n ir.Node, top int) (res ir.Node) {
+ if base.EnableTrace && base.Flag.LowerT {
+ defer tracePrint("typecheck1", n)(&res)
+ }
+
+ switch n.Op() {
+ case ir.OLITERAL, ir.ONAME, ir.ONONAME, ir.OTYPE:
+ if n.Sym() == nil {
+ return n
+ }
+
+ if n.Op() == ir.ONAME {
+ n := n.(*ir.Name)
+ if n.BuiltinOp != 0 && top&ctxCallee == 0 {
+ base.Errorf("use of builtin %v not in function call", n.Sym())
+ n.SetType(nil)
+ return n
+ }
+ }
+
+ typecheckdef(n)
+ if n.Op() == ir.ONONAME {
+ n.SetType(nil)
+ return n
+ }
+ }
+
+ switch n.Op() {
+ default:
+ ir.Dump("typecheck", n)
+ base.Fatalf("typecheck %v", n.Op())
+ panic("unreachable")
+
+ // names
+ case ir.OLITERAL:
+ if n.Type() == nil && n.Val().Kind() == constant.String {
+ base.Fatalf("string literal missing type")
+ }
+ return n
+
+ case ir.ONIL, ir.ONONAME:
+ return n
+
+ case ir.ONAME:
+ n := n.(*ir.Name)
+ if n.Name().Decldepth == 0 {
+ n.Name().Decldepth = decldepth
+ }
+ if n.BuiltinOp != 0 {
+ return n
+ }
+ if top&ctxAssign == 0 {
+ // not a write to the variable
+ if ir.IsBlank(n) {
+ base.Errorf("cannot use _ as value")
+ n.SetType(nil)
+ return n
+ }
+ n.Name().SetUsed(true)
+ }
+ return n
+
+ case ir.ONAMEOFFSET:
+ // type already set
+ return n
+
+ case ir.OPACK:
+ n := n.(*ir.PkgName)
+ base.Errorf("use of package %v without selector", n.Sym())
+ n.SetType(nil)
+ return n
+
+ // types (ODEREF is with exprs)
+ case ir.OTYPE:
+ if n.Type() == nil {
+ return n
+ }
+ return n
+
+ case ir.OTSLICE:
+ n := n.(*ir.SliceType)
+ n.Elem = check(n.Elem, ctxType)
+ if n.Elem.Type() == nil {
+ return n
+ }
+ t := types.NewSlice(n.Elem.Type())
+ n.SetOTYPE(t)
+ types.CheckSize(t)
+ return n
+
+ case ir.OTARRAY:
+ n := n.(*ir.ArrayType)
+ n.Elem = check(n.Elem, ctxType)
+ if n.Elem.Type() == nil {
+ return n
+ }
+ if n.Len == nil { // [...]T
+ if !n.Diag() {
+ n.SetDiag(true)
+ base.Errorf("use of [...] array outside of array literal")
+ }
+ return n
+ }
+ n.Len = indexlit(Expr(n.Len))
+ size := n.Len
+ if ir.ConstType(size) != constant.Int {
+ switch {
+ case size.Type() == nil:
+ // Error already reported elsewhere.
+ case size.Type().IsInteger() && size.Op() != ir.OLITERAL:
+ base.Errorf("non-constant array bound %v", size)
+ default:
+ base.Errorf("invalid array bound %v", size)
+ }
+ return n
+ }
+
+ v := size.Val()
+ if ir.ConstOverflow(v, types.Types[types.TINT]) {
+ base.Errorf("array bound is too large")
+ return n
+ }
+
+ if constant.Sign(v) < 0 {
+ base.Errorf("array bound must be non-negative")
+ return n
+ }
+
+ bound, _ := constant.Int64Val(v)
+ t := types.NewArray(n.Elem.Type(), bound)
+ n.SetOTYPE(t)
+ types.CheckSize(t)
+ return n
+
+ case ir.OTMAP:
+ n := n.(*ir.MapType)
+ n.Key = check(n.Key, ctxType)
+ n.Elem = check(n.Elem, ctxType)
+ l := n.Key
+ r := n.Elem
+ if l.Type() == nil || r.Type() == nil {
+ return n
+ }
+ if l.Type().NotInHeap() {
+ base.Errorf("incomplete (or unallocatable) map key not allowed")
+ }
+ if r.Type().NotInHeap() {
+ base.Errorf("incomplete (or unallocatable) map value not allowed")
+ }
+ n.SetOTYPE(types.NewMap(l.Type(), r.Type()))
+ mapqueue = append(mapqueue, n) // check map keys when all types are settled
+ return n
+
+ case ir.OTCHAN:
+ n := n.(*ir.ChanType)
+ n.Elem = check(n.Elem, ctxType)
+ l := n.Elem
+ if l.Type() == nil {
+ return n
+ }
+ if l.Type().NotInHeap() {
+ base.Errorf("chan of incomplete (or unallocatable) type not allowed")
+ }
+ n.SetOTYPE(types.NewChan(l.Type(), n.Dir))
+ return n
+
+ case ir.OTSTRUCT:
+ n := n.(*ir.StructType)
+ n.SetOTYPE(NewStructType(n.Fields))
+ return n
+
+ case ir.OTINTER:
+ n := n.(*ir.InterfaceType)
+ n.SetOTYPE(tointerface(n.Methods))
+ return n
+
+ case ir.OTFUNC:
+ n := n.(*ir.FuncType)
+ n.SetOTYPE(NewFuncType(n.Recv, n.Params, n.Results))
+ return n
+
+ // type or expr
+ case ir.ODEREF:
+ n := n.(*ir.StarExpr)
+ n.X = check(n.X, ctxExpr|ctxType)
+ l := n.X
+ t := l.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+ if l.Op() == ir.OTYPE {
+ n.SetOTYPE(types.NewPtr(l.Type()))
+ // Ensure l.Type's size is computed (CheckSize) for the backend. Issue 20174.
+ types.CheckSize(l.Type())
+ return n
+ }
+
+ if !t.IsPtr() {
+ if top&(ctxExpr|ctxStmt) != 0 {
+ base.Errorf("invalid indirect of %L", n.X)
+ n.SetType(nil)
+ return n
+ }
+ base.Errorf("%v is not a type", l)
+ return n
+ }
+
+ n.SetType(t.Elem())
+ return n
+
+ // arithmetic exprs
+ case ir.OASOP,
+ ir.OADD,
+ ir.OAND,
+ ir.OANDAND,
+ ir.OANDNOT,
+ ir.ODIV,
+ ir.OEQ,
+ ir.OGE,
+ ir.OGT,
+ ir.OLE,
+ ir.OLT,
+ ir.OLSH,
+ ir.ORSH,
+ ir.OMOD,
+ ir.OMUL,
+ ir.ONE,
+ ir.OOR,
+ ir.OOROR,
+ ir.OSUB,
+ ir.OXOR:
+ var l, r ir.Node
+ var setLR func()
+ switch n := n.(type) {
+ case *ir.AssignOpStmt:
+ l, r = n.X, n.Y
+ setLR = func() { n.X = l; n.Y = r }
+ case *ir.BinaryExpr:
+ l, r = n.X, n.Y
+ setLR = func() { n.X = l; n.Y = r }
+ case *ir.LogicalExpr:
+ l, r = n.X, n.Y
+ setLR = func() { n.X = l; n.Y = r }
+ }
+ l = Expr(l)
+ r = Expr(r)
+ setLR()
+ if l.Type() == nil || r.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ op := n.Op()
+ if n.Op() == ir.OASOP {
+ n := n.(*ir.AssignOpStmt)
+ checkassign(n, l)
+ if n.IncDec && !okforarith[l.Type().Kind()] {
+ base.Errorf("invalid operation: %v (non-numeric type %v)", n, l.Type())
+ n.SetType(nil)
+ return n
+ }
+ // TODO(marvin): Fix Node.EType type union.
+ op = n.AsOp
+ }
+ if op == ir.OLSH || op == ir.ORSH {
+ r = DefaultLit(r, types.Types[types.TUINT])
+ setLR()
+ t := r.Type()
+ if !t.IsInteger() {
+ base.Errorf("invalid operation: %v (shift count type %v, must be integer)", n, r.Type())
+ n.SetType(nil)
+ return n
+ }
+ if t.IsSigned() && !types.AllowsGoVersion(curpkg(), 1, 13) {
+ base.ErrorfVers("go1.13", "invalid operation: %v (signed shift count type %v)", n, r.Type())
+ n.SetType(nil)
+ return n
+ }
+ t = l.Type()
+ if t != nil && t.Kind() != types.TIDEAL && !t.IsInteger() {
+ base.Errorf("invalid operation: %v (shift of type %v)", n, t)
+ n.SetType(nil)
+ return n
+ }
+
+ // no defaultlit for left
+ // the outer context gives the type
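+ // For example, in "var x int32 = 1 << s" with s non-constant, the
+ // shift keeps the untyped type of 1 here so that the assignment
+ // context can later convert it to int32.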
+ n.SetType(l.Type())
+ if (l.Type() == types.UntypedFloat || l.Type() == types.UntypedComplex) && r.Op() == ir.OLITERAL {
+ n.SetType(types.UntypedInt)
+ }
+ return n
+ }
+
+ // For "x == x && len(s)", it's better to report that "len(s)" (type int)
+ // can't be used with "&&" than to report that "x == x" (type untyped bool)
+ // can't be converted to int (see issue #41500).
+ if n.Op() == ir.OANDAND || n.Op() == ir.OOROR {
+ n := n.(*ir.LogicalExpr)
+ if !n.X.Type().IsBoolean() {
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(n.X.Type()))
+ n.SetType(nil)
+ return n
+ }
+ if !n.Y.Type().IsBoolean() {
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(n.Y.Type()))
+ n.SetType(nil)
+ return n
+ }
+ }
+
+ // ideal mixed with non-ideal
+ l, r = defaultlit2(l, r, false)
+ setLR()
+
+ if l.Type() == nil || r.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ t := l.Type()
+ if t.Kind() == types.TIDEAL {
+ t = r.Type()
+ }
+ et := t.Kind()
+ if et == types.TIDEAL {
+ et = types.TINT
+ }
+ aop := ir.OXXX
+ if iscmp[n.Op()] && t.Kind() != types.TIDEAL && !types.Identical(l.Type(), r.Type()) {
+ // comparison is okay as long as one side is
+ // assignable to the other. convert so they have
+ // the same type.
+ //
+ // the only conversion that isn't a no-op is concrete == interface.
+ // in that case, check comparability of the concrete type.
+ // The conversion allocates, so only do it if the concrete type is huge.
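+ // For example, "e == myErr" (with e of type error and myErr of a
+ // hypothetical concrete type *MyError) is allowed because *MyError
+ // is assignable to error; the conversion node is only inserted here
+ // when the concrete type is huge.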
+ converted := false
+ if r.Type().Kind() != types.TBLANK {
+ aop, _ = assignop(l.Type(), r.Type())
+ if aop != ir.OXXX {
+ if r.Type().IsInterface() && !l.Type().IsInterface() && !types.IsComparable(l.Type()) {
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(l.Type()))
+ n.SetType(nil)
+ return n
+ }
+
+ types.CalcSize(l.Type())
+ if r.Type().IsInterface() == l.Type().IsInterface() || l.Type().Width >= 1<<16 {
+ l = ir.NewConvExpr(base.Pos, aop, r.Type(), l)
+ l.SetTypecheck(1)
+ setLR()
+ }
+
+ t = r.Type()
+ converted = true
+ }
+ }
+
+ if !converted && l.Type().Kind() != types.TBLANK {
+ aop, _ = assignop(r.Type(), l.Type())
+ if aop != ir.OXXX {
+ if l.Type().IsInterface() && !r.Type().IsInterface() && !types.IsComparable(r.Type()) {
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(r.Type()))
+ n.SetType(nil)
+ return n
+ }
+
+ types.CalcSize(r.Type())
+ if r.Type().IsInterface() == l.Type().IsInterface() || r.Type().Width >= 1<<16 {
+ r = ir.NewConvExpr(base.Pos, aop, l.Type(), r)
+ r.SetTypecheck(1)
+ setLR()
+ }
+
+ t = l.Type()
+ }
+ }
+
+ et = t.Kind()
+ }
+
+ if t.Kind() != types.TIDEAL && !types.Identical(l.Type(), r.Type()) {
+ l, r = defaultlit2(l, r, true)
+ if l.Type() == nil || r.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ if l.Type().IsInterface() == r.Type().IsInterface() || aop == 0 {
+ base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, l.Type(), r.Type())
+ n.SetType(nil)
+ return n
+ }
+ }
+
+ if t.Kind() == types.TIDEAL {
+ t = mixUntyped(l.Type(), r.Type())
+ }
+ if dt := defaultType(t); !okfor[op][dt.Kind()] {
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(t))
+ n.SetType(nil)
+ return n
+ }
+
+ // okfor allows any array == array, map == map, func == func.
+ // restrict to slice/map/func == nil and nil == slice/map/func.
+ if l.Type().IsArray() && !types.IsComparable(l.Type()) {
+ base.Errorf("invalid operation: %v (%v cannot be compared)", n, l.Type())
+ n.SetType(nil)
+ return n
+ }
+
+ if l.Type().IsSlice() && !ir.IsNil(l) && !ir.IsNil(r) {
+ base.Errorf("invalid operation: %v (slice can only be compared to nil)", n)
+ n.SetType(nil)
+ return n
+ }
+
+ if l.Type().IsMap() && !ir.IsNil(l) && !ir.IsNil(r) {
+ base.Errorf("invalid operation: %v (map can only be compared to nil)", n)
+ n.SetType(nil)
+ return n
+ }
+
+ if l.Type().Kind() == types.TFUNC && !ir.IsNil(l) && !ir.IsNil(r) {
+ base.Errorf("invalid operation: %v (func can only be compared to nil)", n)
+ n.SetType(nil)
+ return n
+ }
+
+ if l.Type().IsStruct() {
+ if f := types.IncomparableField(l.Type()); f != nil {
+ base.Errorf("invalid operation: %v (struct containing %v cannot be compared)", n, f.Type)
+ n.SetType(nil)
+ return n
+ }
+ }
+
+ if iscmp[n.Op()] {
+ t = types.UntypedBool
+ n.SetType(t)
+ if con := EvalConst(n); con.Op() == ir.OLITERAL {
+ return con
+ }
+ l, r = defaultlit2(l, r, true)
+ setLR()
+ return n
+ }
+
+ if et == types.TSTRING && n.Op() == ir.OADD {
+ // create or update OADDSTR node with list of strings in x + y + z + (w + v) + ...
+ n := n.(*ir.BinaryExpr)
+ var add *ir.AddStringExpr
+ if l.Op() == ir.OADDSTR {
+ add = l.(*ir.AddStringExpr)
+ add.SetPos(n.Pos())
+ } else {
+ add = ir.NewAddStringExpr(n.Pos(), []ir.Node{l})
+ }
+ if r.Op() == ir.OADDSTR {
+ r := r.(*ir.AddStringExpr)
+ add.List.Append(r.List.Take()...)
+ } else {
+ add.List.Append(r)
+ }
+ add.SetType(t)
+ return add
+ }
+
+ if (op == ir.ODIV || op == ir.OMOD) && ir.IsConst(r, constant.Int) {
+ if constant.Sign(r.Val()) == 0 {
+ base.Errorf("division by zero")
+ n.SetType(nil)
+ return n
+ }
+ }
+
+ n.SetType(t)
+ return n
+
+ case ir.OBITNOT, ir.ONEG, ir.ONOT, ir.OPLUS:
+ n := n.(*ir.UnaryExpr)
+ n.X = Expr(n.X)
+ l := n.X
+ t := l.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+ if !okfor[n.Op()][defaultType(t).Kind()] {
+ base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(t))
+ n.SetType(nil)
+ return n
+ }
+
+ n.SetType(t)
+ return n
+
+ // exprs
+ case ir.OADDR:
+ n := n.(*ir.AddrExpr)
+ n.X = Expr(n.X)
+ if n.X.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+
+ switch n.X.Op() {
+ case ir.OARRAYLIT, ir.OMAPLIT, ir.OSLICELIT, ir.OSTRUCTLIT:
+ n.SetOp(ir.OPTRLIT)
+
+ default:
+ checklvalue(n.X, "take the address of")
+ r := ir.OuterValue(n.X)
+ if r.Op() == ir.ONAME {
+ r := r.(*ir.Name)
+ if ir.Orig(r) != r {
+ base.Fatalf("found non-orig name node %v", r) // TODO(mdempsky): What does this mean?
+ }
+ r.Name().SetAddrtaken(true)
+ if r.Name().IsClosureVar() && !CaptureVarsComplete {
+ // Mark the original variable as Addrtaken so that capturevars
+ // knows not to pass it by value.
+ // But if the capturevars phase is complete, don't touch it,
+ // in case l.Name's containing function has not yet been compiled.
+ r.Name().Defn.Name().SetAddrtaken(true)
+ }
+ }
+ n.X = DefaultLit(n.X, nil)
+ if n.X.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ }
+
+ n.SetType(types.NewPtr(n.X.Type()))
+ return n
+
+ case ir.OCOMPLIT:
+ return typecheckcomplit(n.(*ir.CompLitExpr))
+
+ case ir.OXDOT, ir.ODOT:
+ n := n.(*ir.SelectorExpr)
+ if n.Op() == ir.OXDOT {
+ n = AddImplicitDots(n)
+ n.SetOp(ir.ODOT)
+ if n.X == nil {
+ n.SetType(nil)
+ return n
+ }
+ }
+
+ n.X = check(n.X, ctxExpr|ctxType)
+
+ n.X = DefaultLit(n.X, nil)
+
+ t := n.X.Type()
+ if t == nil {
+ base.UpdateErrorDot(ir.Line(n), fmt.Sprint(n.X), fmt.Sprint(n))
+ n.SetType(nil)
+ return n
+ }
+
+ s := n.Sel
+
+ if n.X.Op() == ir.OTYPE {
+ return typecheckMethodExpr(n)
+ }
+
+ if t.IsPtr() && !t.Elem().IsInterface() {
+ t = t.Elem()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+ n.SetOp(ir.ODOTPTR)
+ types.CheckSize(t)
+ }
+
+ if n.Sel.IsBlank() {
+ base.Errorf("cannot refer to blank field or method")
+ n.SetType(nil)
+ return n
+ }
+
+ if lookdot(n, t, 0) == nil {
+ // Legitimate field or method lookup failed, try to explain the error
+ switch {
+ case t.IsEmptyInterface():
+ base.Errorf("%v undefined (type %v is interface with no methods)", n, n.X.Type())
+
+ case t.IsPtr() && t.Elem().IsInterface():
+ // Pointer to interface is almost always a mistake.
+ base.Errorf("%v undefined (type %v is pointer to interface, not interface)", n, n.X.Type())
+
+ case lookdot(n, t, 1) != nil:
+ // Field or method matches by name, but it is not exported.
+ base.Errorf("%v undefined (cannot refer to unexported field or method %v)", n, n.Sel)
+
+ default:
+ if mt := lookdot(n, t, 2); mt != nil && visible(mt.Sym) { // Case-insensitive lookup.
+ base.Errorf("%v undefined (type %v has no field or method %v, but does have %v)", n, n.X.Type(), n.Sel, mt.Sym)
+ } else {
+ base.Errorf("%v undefined (type %v has no field or method %v)", n, n.X.Type(), n.Sel)
+ }
+ }
+ n.SetType(nil)
+ return n
+ }
+
+ if (n.Op() == ir.ODOTINTER || n.Op() == ir.ODOTMETH) && top&ctxCallee == 0 {
+ return typecheckpartialcall(n, s)
+ }
+ return n
+
+ case ir.ODOTTYPE:
+ n := n.(*ir.TypeAssertExpr)
+ n.X = Expr(n.X)
+ n.X = DefaultLit(n.X, nil)
+ l := n.X
+ t := l.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+ if !t.IsInterface() {
+ base.Errorf("invalid type assertion: %v (non-interface type %v on left)", n, t)
+ n.SetType(nil)
+ return n
+ }
+
+ if n.Ntype != nil {
+ n.Ntype = check(n.Ntype, ctxType)
+ n.SetType(n.Ntype.Type())
+ n.Ntype = nil
+ if n.Type() == nil {
+ return n
+ }
+ }
+
+ if n.Type() != nil && !n.Type().IsInterface() {
+ var missing, have *types.Field
+ var ptr int
+ if !implements(n.Type(), t, &missing, &have, &ptr) {
+ if have != nil && have.Sym == missing.Sym {
+ base.Errorf("impossible type assertion:\n\t%v does not implement %v (wrong type for %v method)\n"+
+ "\t\thave %v%S\n\t\twant %v%S", n.Type(), t, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
+ } else if ptr != 0 {
+ base.Errorf("impossible type assertion:\n\t%v does not implement %v (%v method has pointer receiver)", n.Type(), t, missing.Sym)
+ } else if have != nil {
+ base.Errorf("impossible type assertion:\n\t%v does not implement %v (missing %v method)\n"+
+ "\t\thave %v%S\n\t\twant %v%S", n.Type(), t, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
+ } else {
+ base.Errorf("impossible type assertion:\n\t%v does not implement %v (missing %v method)", n.Type(), t, missing.Sym)
+ }
+ n.SetType(nil)
+ return n
+ }
+ }
+ return n
+
+ case ir.OINDEX:
+ n := n.(*ir.IndexExpr)
+ n.X = Expr(n.X)
+ n.X = DefaultLit(n.X, nil)
+ n.X = implicitstar(n.X)
+ l := n.X
+ n.Index = Expr(n.Index)
+ r := n.Index
+ t := l.Type()
+ if t == nil || r.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ switch t.Kind() {
+ default:
+ base.Errorf("invalid operation: %v (type %v does not support indexing)", n, t)
+ n.SetType(nil)
+ return n
+
+ case types.TSTRING, types.TARRAY, types.TSLICE:
+ n.Index = indexlit(n.Index)
+ if t.IsString() {
+ n.SetType(types.ByteType)
+ } else {
+ n.SetType(t.Elem())
+ }
+ why := "string"
+ if t.IsArray() {
+ why = "array"
+ } else if t.IsSlice() {
+ why = "slice"
+ }
+
+ if n.Index.Type() != nil && !n.Index.Type().IsInteger() {
+ base.Errorf("non-integer %s index %v", why, n.Index)
+ return n
+ }
+
+ if !n.Bounded() && ir.IsConst(n.Index, constant.Int) {
+ x := n.Index.Val()
+ if constant.Sign(x) < 0 {
+ base.Errorf("invalid %s index %v (index must be non-negative)", why, n.Index)
+ } else if t.IsArray() && constant.Compare(x, token.GEQ, constant.MakeInt64(t.NumElem())) {
+ base.Errorf("invalid array index %v (out of bounds for %d-element array)", n.Index, t.NumElem())
+ } else if ir.IsConst(n.X, constant.String) && constant.Compare(x, token.GEQ, constant.MakeInt64(int64(len(ir.StringVal(n.X))))) {
+ base.Errorf("invalid string index %v (out of bounds for %d-byte string)", n.Index, len(ir.StringVal(n.X)))
+ } else if ir.ConstOverflow(x, types.Types[types.TINT]) {
+ base.Errorf("invalid %s index %v (index too large)", why, n.Index)
+ }
+ }
+
+ case types.TMAP:
+ n.Index = AssignConv(n.Index, t.Key(), "map index")
+ n.SetType(t.Elem())
+ n.SetOp(ir.OINDEXMAP)
+ n.Assigned = false
+ }
+ return n
+
+ case ir.ORECV:
+ n := n.(*ir.UnaryExpr)
+ n.X = Expr(n.X)
+ n.X = DefaultLit(n.X, nil)
+ l := n.X
+ t := l.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+ if !t.IsChan() {
+ base.Errorf("invalid operation: %v (receive from non-chan type %v)", n, t)
+ n.SetType(nil)
+ return n
+ }
+
+ if !t.ChanDir().CanRecv() {
+ base.Errorf("invalid operation: %v (receive from send-only type %v)", n, t)
+ n.SetType(nil)
+ return n
+ }
+
+ n.SetType(t.Elem())
+ return n
+
+ case ir.OSEND:
+ n := n.(*ir.SendStmt)
+ n.Chan = Expr(n.Chan)
+ n.Value = Expr(n.Value)
+ n.Chan = DefaultLit(n.Chan, nil)
+ t := n.Chan.Type()
+ if t == nil {
+ return n
+ }
+ if !t.IsChan() {
+ base.Errorf("invalid operation: %v (send to non-chan type %v)", n, t)
+ return n
+ }
+
+ if !t.ChanDir().CanSend() {
+ base.Errorf("invalid operation: %v (send to receive-only type %v)", n, t)
+ return n
+ }
+
+ n.Value = AssignConv(n.Value, t.Elem(), "send")
+ if n.Value.Type() == nil {
+ return n
+ }
+ return n
+
+ case ir.OSLICEHEADER:
+ // Errors here are Fatalf instead of Errorf because only the compiler
+ // can construct an OSLICEHEADER node.
+ // Components used in OSLICEHEADER that are supplied by parsed source code
+ // have already been typechecked in e.g. OMAKESLICE earlier.
+ n := n.(*ir.SliceHeaderExpr)
+ t := n.Type()
+ if t == nil {
+ base.Fatalf("no type specified for OSLICEHEADER")
+ }
+
+ if !t.IsSlice() {
+ base.Fatalf("invalid type %v for OSLICEHEADER", n.Type())
+ }
+
+ if n.Ptr == nil || n.Ptr.Type() == nil || !n.Ptr.Type().IsUnsafePtr() {
+ base.Fatalf("need unsafe.Pointer for OSLICEHEADER")
+ }
+
+ if x := len(n.LenCap); x != 2 {
+ base.Fatalf("expected 2 params (len, cap) for OSLICEHEADER, got %d", x)
+ }
+
+ n.Ptr = Expr(n.Ptr)
+ l := Expr(n.LenCap[0])
+ c := Expr(n.LenCap[1])
+ l = DefaultLit(l, types.Types[types.TINT])
+ c = DefaultLit(c, types.Types[types.TINT])
+
+ if ir.IsConst(l, constant.Int) && ir.Int64Val(l) < 0 {
+ base.Fatalf("len for OSLICEHEADER must be non-negative")
+ }
+
+ if ir.IsConst(c, constant.Int) && ir.Int64Val(c) < 0 {
+ base.Fatalf("cap for OSLICEHEADER must be non-negative")
+ }
+
+ if ir.IsConst(l, constant.Int) && ir.IsConst(c, constant.Int) && constant.Compare(l.Val(), token.GTR, c.Val()) {
+ base.Fatalf("len larger than cap for OSLICEHEADER")
+ }
+
+ n.LenCap[0] = l
+ n.LenCap[1] = c
+ return n
+
+ case ir.OMAKESLICECOPY:
+ // Errors here are Fatalf instead of Errorf because only the compiler
+ // can construct an OMAKESLICECOPY node.
+ // Components used in OMAKESLICECOPY that are supplied by parsed source code
+ // have already been typechecked in OMAKE and OCOPY earlier.
+ n := n.(*ir.MakeExpr)
+ t := n.Type()
+
+ if t == nil {
+ base.Fatalf("no type specified for OMAKESLICECOPY")
+ }
+
+ if !t.IsSlice() {
+ base.Fatalf("invalid type %v for OMAKESLICECOPY", n.Type())
+ }
+
+ if n.Len == nil {
+ base.Fatalf("missing len argument for OMAKESLICECOPY")
+ }
+
+ if n.Cap == nil {
+ base.Fatalf("missing slice argument to copy for OMAKESLICECOPY")
+ }
+
+ n.Len = Expr(n.Len)
+ n.Cap = Expr(n.Cap)
+
+ n.Len = DefaultLit(n.Len, types.Types[types.TINT])
+
+ if !n.Len.Type().IsInteger() && n.Type().Kind() != types.TIDEAL {
+ base.Errorf("non-integer len argument in OMAKESLICECOPY")
+ }
+
+ if ir.IsConst(n.Len, constant.Int) {
+ if ir.ConstOverflow(n.Len.Val(), types.Types[types.TINT]) {
+ base.Fatalf("len for OMAKESLICECOPY too large")
+ }
+ if constant.Sign(n.Len.Val()) < 0 {
+ base.Fatalf("len for OMAKESLICECOPY must be non-negative")
+ }
+ }
+ return n
+
+ case ir.OSLICE, ir.OSLICE3:
+ n := n.(*ir.SliceExpr)
+ n.X = Expr(n.X)
+ low, high, max := n.SliceBounds()
+ hasmax := n.Op().IsSlice3()
+ low = Expr(low)
+ high = Expr(high)
+ max = Expr(max)
+ n.X = DefaultLit(n.X, nil)
+ low = indexlit(low)
+ high = indexlit(high)
+ max = indexlit(max)
+ n.SetSliceBounds(low, high, max)
+ l := n.X
+ if l.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ if l.Type().IsArray() {
+ if !ir.IsAssignable(n.X) {
+ base.Errorf("invalid operation %v (slice of unaddressable value)", n)
+ n.SetType(nil)
+ return n
+ }
+
+ addr := NodAddr(n.X)
+ addr.SetImplicit(true)
+ n.X = Expr(addr)
+ l = n.X
+ }
+ t := l.Type()
+ var tp *types.Type
+ if t.IsString() {
+ if hasmax {
+ base.Errorf("invalid operation %v (3-index slice of string)", n)
+ n.SetType(nil)
+ return n
+ }
+ n.SetType(t)
+ n.SetOp(ir.OSLICESTR)
+ } else if t.IsPtr() && t.Elem().IsArray() {
+ tp = t.Elem()
+ n.SetType(types.NewSlice(tp.Elem()))
+ types.CalcSize(n.Type())
+ if hasmax {
+ n.SetOp(ir.OSLICE3ARR)
+ } else {
+ n.SetOp(ir.OSLICEARR)
+ }
+ } else if t.IsSlice() {
+ n.SetType(t)
+ } else {
+ base.Errorf("cannot slice %v (type %v)", l, t)
+ n.SetType(nil)
+ return n
+ }
+
+ if low != nil && !checksliceindex(l, low, tp) {
+ n.SetType(nil)
+ return n
+ }
+ if high != nil && !checksliceindex(l, high, tp) {
+ n.SetType(nil)
+ return n
+ }
+ if max != nil && !checksliceindex(l, max, tp) {
+ n.SetType(nil)
+ return n
+ }
+ if !checksliceconst(low, high) || !checksliceconst(low, max) || !checksliceconst(high, max) {
+ n.SetType(nil)
+ return n
+ }
+ return n
+
+ // call and call like
+ case ir.OCALL:
+ n := n.(*ir.CallExpr)
+ n.Use = ir.CallUseExpr
+ if top == ctxStmt {
+ n.Use = ir.CallUseStmt
+ }
+ Stmts(n.Init()) // imported rewritten f(g()) calls (#30907)
+ n.X = check(n.X, ctxExpr|ctxType|ctxCallee)
+ if n.X.Diag() {
+ n.SetDiag(true)
+ }
+
+ l := n.X
+
+ if l.Op() == ir.ONAME && l.(*ir.Name).BuiltinOp != 0 {
+ l := l.(*ir.Name)
+ if n.IsDDD && l.BuiltinOp != ir.OAPPEND {
+ base.Errorf("invalid use of ... with builtin %v", l)
+ }
+
+ // builtin: OLEN, OCAP, etc.
+ switch l.BuiltinOp {
+ default:
+ base.Fatalf("unknown builtin %v", l)
+
+ case ir.OAPPEND, ir.ODELETE, ir.OMAKE, ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
+ n.SetOp(l.BuiltinOp)
+ n.X = nil
+ n.SetTypecheck(0) // re-typechecking new op is OK, not a loop
+ return check(n, top)
+
+ case ir.OCAP, ir.OCLOSE, ir.OIMAG, ir.OLEN, ir.OPANIC, ir.OREAL:
+ typecheckargs(n)
+ fallthrough
+ case ir.ONEW, ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
+ arg, ok := needOneArg(n, "%v", n.Op())
+ if !ok {
+ n.SetType(nil)
+ return n
+ }
+ u := ir.NewUnaryExpr(n.Pos(), l.BuiltinOp, arg)
+ return check(ir.InitExpr(n.Init(), u), top) // typecheckargs can add to old.Init
+
+ case ir.OCOMPLEX, ir.OCOPY:
+ typecheckargs(n)
+ arg1, arg2, ok := needTwoArgs(n)
+ if !ok {
+ n.SetType(nil)
+ return n
+ }
+ b := ir.NewBinaryExpr(n.Pos(), l.BuiltinOp, arg1, arg2)
+ return check(ir.InitExpr(n.Init(), b), top) // typecheckargs can add to old.Init
+ }
+ panic("unreachable")
+ }
+
+ n.X = DefaultLit(n.X, nil)
+ l = n.X
+ if l.Op() == ir.OTYPE {
+ if n.IsDDD {
+ if !l.Type().Broke() {
+ base.Errorf("invalid use of ... in type conversion to %v", l.Type())
+ }
+ n.SetDiag(true)
+ }
+
+ // pick off before type-checking arguments
+ arg, ok := needOneArg(n, "conversion to %v", l.Type())
+ if !ok {
+ n.SetType(nil)
+ return n
+ }
+
+ n := ir.NewConvExpr(n.Pos(), ir.OCONV, nil, arg)
+ n.SetType(l.Type())
+ return typecheck1(n, top)
+ }
+
+ typecheckargs(n)
+ t := l.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+ types.CheckSize(t)
+
+ switch l.Op() {
+ case ir.ODOTINTER:
+ n.SetOp(ir.OCALLINTER)
+
+ case ir.ODOTMETH:
+ l := l.(*ir.SelectorExpr)
+ n.SetOp(ir.OCALLMETH)
+
+ // typecheckaste was used here but there wasn't enough
+ // information further down the call chain to know if we
+ // were testing a method receiver for unexported fields.
+ // It isn't necessary, so just do a sanity check.
+ tp := t.Recv().Type
+
+ if l.X == nil || !types.Identical(l.X.Type(), tp) {
+ base.Fatalf("method receiver")
+ }
+
+ default:
+ n.SetOp(ir.OCALLFUNC)
+ if t.Kind() != types.TFUNC {
+ // TODO(mdempsky): Remove "o.Sym() != nil" once we stop
+ // using ir.Name for numeric literals.
+ if o := ir.Orig(l); o.Name() != nil && o.Sym() != nil && types.BuiltinPkg.Lookup(o.Sym().Name).Def != nil {
+ // be more specific when the non-function
+ // name matches a predeclared function
+ base.Errorf("cannot call non-function %L, declared at %s",
+ l, base.FmtPos(o.Name().Pos()))
+ } else {
+ base.Errorf("cannot call non-function %L", l)
+ }
+ n.SetType(nil)
+ return n
+ }
+ }
+
+ typecheckaste(ir.OCALL, n.X, n.IsDDD, t.Params(), n.Args, func() string { return fmt.Sprintf("argument to %v", n.X) })
+ if t.NumResults() == 0 {
+ return n
+ }
+ if t.NumResults() == 1 {
+ n.SetType(l.Type().Results().Field(0).Type)
+
+ if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME {
+ if sym := n.X.(*ir.Name).Sym(); types.IsRuntimePkg(sym.Pkg) && sym.Name == "getg" {
+ // Emit code for runtime.getg() directly instead of calling function.
+ // Most such rewrites (for example the similar one for math.Sqrt) should be done in walk,
+ // so that the ordering pass can make sure to preserve the semantics of the original code
+ // (in particular, the exact time of the function call) by introducing temporaries.
+ // In this case, we know getg() always returns the same result within a given function
+ // and we want to avoid the temporaries, so we do the rewrite earlier than is typical.
+ n.SetOp(ir.OGETG)
+ }
+ }
+ return n
+ }
+
+ // multiple return
+ if top&(ctxMultiOK|ctxStmt) == 0 {
+ base.Errorf("multiple-value %v() in single-value context", l)
+ return n
+ }
+
+ n.SetType(l.Type().Results())
+ return n
+
+ case ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
+ n := n.(*ir.UnaryExpr)
+ n.SetType(types.Types[types.TUINTPTR])
+ return n
+
+ case ir.OCAP, ir.OLEN:
+ n := n.(*ir.UnaryExpr)
+ n.X = Expr(n.X)
+ n.X = DefaultLit(n.X, nil)
+ n.X = implicitstar(n.X)
+ l := n.X
+ t := l.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+
+ var ok bool
+ if n.Op() == ir.OLEN {
+ ok = okforlen[t.Kind()]
+ } else {
+ ok = okforcap[t.Kind()]
+ }
+ if !ok {
+ base.Errorf("invalid argument %L for %v", l, n.Op())
+ n.SetType(nil)
+ return n
+ }
+
+ n.SetType(types.Types[types.TINT])
+ return n
+
+ case ir.OREAL, ir.OIMAG:
+ n := n.(*ir.UnaryExpr)
+ n.X = Expr(n.X)
+ l := n.X
+ t := l.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+
+ // Determine result type.
+ switch t.Kind() {
+ case types.TIDEAL:
+ n.SetType(types.UntypedFloat)
+ case types.TCOMPLEX64:
+ n.SetType(types.Types[types.TFLOAT32])
+ case types.TCOMPLEX128:
+ n.SetType(types.Types[types.TFLOAT64])
+ default:
+ base.Errorf("invalid argument %L for %v", l, n.Op())
+ n.SetType(nil)
+ return n
+ }
+ return n
+
+ case ir.OCOMPLEX:
+ n := n.(*ir.BinaryExpr)
+ l := Expr(n.X)
+ r := Expr(n.Y)
+ if l.Type() == nil || r.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ l, r = defaultlit2(l, r, false)
+ if l.Type() == nil || r.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ n.X = l
+ n.Y = r
+
+ if !types.Identical(l.Type(), r.Type()) {
+ base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, l.Type(), r.Type())
+ n.SetType(nil)
+ return n
+ }
+
+ var t *types.Type
+ switch l.Type().Kind() {
+ default:
+ base.Errorf("invalid operation: %v (arguments have type %v, expected floating-point)", n, l.Type())
+ n.SetType(nil)
+ return n
+
+ case types.TIDEAL:
+ t = types.UntypedComplex
+
+ case types.TFLOAT32:
+ t = types.Types[types.TCOMPLEX64]
+
+ case types.TFLOAT64:
+ t = types.Types[types.TCOMPLEX128]
+ }
+ n.SetType(t)
+ return n
+
+ case ir.OCLOSE:
+ n := n.(*ir.UnaryExpr)
+ n.X = Expr(n.X)
+ n.X = DefaultLit(n.X, nil)
+ l := n.X
+ t := l.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+ if !t.IsChan() {
+ base.Errorf("invalid operation: %v (non-chan type %v)", n, t)
+ n.SetType(nil)
+ return n
+ }
+
+ if !t.ChanDir().CanSend() {
+ base.Errorf("invalid operation: %v (cannot close receive-only channel)", n)
+ n.SetType(nil)
+ return n
+ }
+ return n
+
+ case ir.ODELETE:
+ n := n.(*ir.CallExpr)
+ typecheckargs(n)
+ args := n.Args
+ if len(args) == 0 {
+ base.Errorf("missing arguments to delete")
+ n.SetType(nil)
+ return n
+ }
+
+ if len(args) == 1 {
+ base.Errorf("missing second (key) argument to delete")
+ n.SetType(nil)
+ return n
+ }
+
+ if len(args) != 2 {
+ base.Errorf("too many arguments to delete")
+ n.SetType(nil)
+ return n
+ }
+
+ l := args[0]
+ r := args[1]
+ if l.Type() != nil && !l.Type().IsMap() {
+ base.Errorf("first argument to delete must be map; have %L", l.Type())
+ n.SetType(nil)
+ return n
+ }
+
+ args[1] = AssignConv(r, l.Type().Key(), "delete")
+ return n
+
+ case ir.OAPPEND:
+ n := n.(*ir.CallExpr)
+ typecheckargs(n)
+ args := n.Args
+ if len(args) == 0 {
+ base.Errorf("missing arguments to append")
+ n.SetType(nil)
+ return n
+ }
+
+ t := args[0].Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+
+ n.SetType(t)
+ if !t.IsSlice() {
+ if ir.IsNil(args[0]) {
+ base.Errorf("first argument to append must be typed slice; have untyped nil")
+ n.SetType(nil)
+ return n
+ }
+
+ base.Errorf("first argument to append must be slice; have %L", t)
+ n.SetType(nil)
+ return n
+ }
+
+ if n.IsDDD {
+ if len(args) == 1 {
+ base.Errorf("cannot use ... on first argument to append")
+ n.SetType(nil)
+ return n
+ }
+
+ if len(args) != 2 {
+ base.Errorf("too many arguments to append")
+ n.SetType(nil)
+ return n
+ }
+
+ if t.Elem().IsKind(types.TUINT8) && args[1].Type().IsString() {
+ args[1] = DefaultLit(args[1], types.Types[types.TSTRING])
+ return n
+ }
+
+ args[1] = AssignConv(args[1], t.Underlying(), "append")
+ return n
+ }
+
+ as := args[1:]
+ for i, n := range as {
+ if n.Type() == nil {
+ continue
+ }
+ as[i] = AssignConv(n, t.Elem(), "append")
+ types.CheckSize(as[i].Type()) // ensure width is calculated for backend
+ }
+ return n
+
+ case ir.OCOPY:
+ n := n.(*ir.BinaryExpr)
+ n.SetType(types.Types[types.TINT])
+ n.X = Expr(n.X)
+ n.X = DefaultLit(n.X, nil)
+ n.Y = Expr(n.Y)
+ n.Y = DefaultLit(n.Y, nil)
+ if n.X.Type() == nil || n.Y.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+
+ // copy([]byte, string)
+ if n.X.Type().IsSlice() && n.Y.Type().IsString() {
+ if types.Identical(n.X.Type().Elem(), types.ByteType) {
+ return n
+ }
+ base.Errorf("arguments to copy have different element types: %L and string", n.X.Type())
+ n.SetType(nil)
+ return n
+ }
+
+ if !n.X.Type().IsSlice() || !n.Y.Type().IsSlice() {
+ if !n.X.Type().IsSlice() && !n.Y.Type().IsSlice() {
+ base.Errorf("arguments to copy must be slices; have %L, %L", n.X.Type(), n.Y.Type())
+ } else if !n.X.Type().IsSlice() {
+ base.Errorf("first argument to copy should be slice; have %L", n.X.Type())
+ } else {
+ base.Errorf("second argument to copy should be slice or string; have %L", n.Y.Type())
+ }
+ n.SetType(nil)
+ return n
+ }
+
+ if !types.Identical(n.X.Type().Elem(), n.Y.Type().Elem()) {
+ base.Errorf("arguments to copy have different element types: %L and %L", n.X.Type(), n.Y.Type())
+ n.SetType(nil)
+ return n
+ }
+ return n
+
+ case ir.OCONV:
+ n := n.(*ir.ConvExpr)
+ types.CheckSize(n.Type()) // ensure width is calculated for backend
+ n.X = Expr(n.X)
+ n.X = convlit1(n.X, n.Type(), true, nil)
+ t := n.X.Type()
+ if t == nil || n.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ op, why := convertop(n.X.Op() == ir.OLITERAL, t, n.Type())
+ if op == ir.OXXX {
+ if !n.Diag() && !n.Type().Broke() && !n.X.Diag() {
+ base.Errorf("cannot convert %L to type %v%s", n.X, n.Type(), why)
+ n.SetDiag(true)
+ }
+ n.SetOp(ir.OCONV)
+ n.SetType(nil)
+ return n
+ }
+
+ n.SetOp(op)
+ switch n.Op() {
+ case ir.OCONVNOP:
+ if t.Kind() == n.Type().Kind() {
+ switch t.Kind() {
+ case types.TFLOAT32, types.TFLOAT64, types.TCOMPLEX64, types.TCOMPLEX128:
+ // Floating point casts imply rounding and
+ // so the conversion must be kept.
+ n.SetOp(ir.OCONV)
+ }
+ }
+
+ // Do not convert to a []byte literal. See CL 125796:
+ // generated code and compiler memory footprint are better without it.
+ case ir.OSTR2BYTES:
+ // ok
+
+ case ir.OSTR2RUNES:
+ if n.X.Op() == ir.OLITERAL {
+ return stringtoruneslit(n)
+ }
+ }
+ return n
+
+ case ir.OMAKE:
+ n := n.(*ir.CallExpr)
+ args := n.Args
+ if len(args) == 0 {
+ base.Errorf("missing argument to make")
+ n.SetType(nil)
+ return n
+ }
+
+ n.Args.Set(nil)
+ l := args[0]
+ l = check(l, ctxType)
+ t := l.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+
+ i := 1
+ var nn ir.Node
+ switch t.Kind() {
+ default:
+ base.Errorf("cannot make type %v", t)
+ n.SetType(nil)
+ return n
+
+ case types.TSLICE:
+ if i >= len(args) {
+ base.Errorf("missing len argument to make(%v)", t)
+ n.SetType(nil)
+ return n
+ }
+
+ l = args[i]
+ i++
+ l = Expr(l)
+ var r ir.Node
+ if i < len(args) {
+ r = args[i]
+ i++
+ r = Expr(r)
+ }
+
+ if l.Type() == nil || (r != nil && r.Type() == nil) {
+ n.SetType(nil)
+ return n
+ }
+ if !checkmake(t, "len", &l) || r != nil && !checkmake(t, "cap", &r) {
+ n.SetType(nil)
+ return n
+ }
+ if ir.IsConst(l, constant.Int) && r != nil && ir.IsConst(r, constant.Int) && constant.Compare(l.Val(), token.GTR, r.Val()) {
+ base.Errorf("len larger than cap in make(%v)", t)
+ n.SetType(nil)
+ return n
+ }
+ nn = ir.NewMakeExpr(n.Pos(), ir.OMAKESLICE, l, r)
+
+ case types.TMAP:
+ if i < len(args) {
+ l = args[i]
+ i++
+ l = Expr(l)
+ l = DefaultLit(l, types.Types[types.TINT])
+ if l.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ if !checkmake(t, "size", &l) {
+ n.SetType(nil)
+ return n
+ }
+ } else {
+ l = ir.NewInt(0)
+ }
+ nn = ir.NewMakeExpr(n.Pos(), ir.OMAKEMAP, l, nil)
+ nn.SetEsc(n.Esc())
+
+ case types.TCHAN:
+ l = nil
+ if i < len(args) {
+ l = args[i]
+ i++
+ l = Expr(l)
+ l = DefaultLit(l, types.Types[types.TINT])
+ if l.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ if !checkmake(t, "buffer", &l) {
+ n.SetType(nil)
+ return n
+ }
+ } else {
+ l = ir.NewInt(0)
+ }
+ nn = ir.NewMakeExpr(n.Pos(), ir.OMAKECHAN, l, nil)
+ }
+
+ if i < len(args) {
+ base.Errorf("too many arguments to make(%v)", t)
+ n.SetType(nil)
+ return n
+ }
+
+ nn.SetType(t)
+ return nn
+
+ case ir.ONEW:
+ n := n.(*ir.UnaryExpr)
+ if n.X == nil {
+ // Fatalf because the OCALL above checked for us,
+ // so this must be an internally-generated mistake.
+ base.Fatalf("missing argument to new")
+ }
+ l := n.X
+ l = check(l, ctxType)
+ t := l.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+ n.X = l
+ n.SetType(types.NewPtr(t))
+ return n
+
+ case ir.OPRINT, ir.OPRINTN:
+ n := n.(*ir.CallExpr)
+ typecheckargs(n)
+ ls := n.Args
+ for i1, n1 := range ls {
+ // Special case for print: int constant is int64, not int.
+ if ir.IsConst(n1, constant.Int) {
+ ls[i1] = DefaultLit(ls[i1], types.Types[types.TINT64])
+ } else {
+ ls[i1] = DefaultLit(ls[i1], nil)
+ }
+ }
+ return n
+
+ case ir.OPANIC:
+ n := n.(*ir.UnaryExpr)
+ n.X = Expr(n.X)
+ n.X = DefaultLit(n.X, types.Types[types.TINTER])
+ if n.X.Type() == nil {
+ n.SetType(nil)
+ return n
+ }
+ return n
+
+ case ir.ORECOVER:
+ n := n.(*ir.CallExpr)
+ if len(n.Args) != 0 {
+ base.Errorf("too many arguments to recover")
+ n.SetType(nil)
+ return n
+ }
+
+ n.SetType(types.Types[types.TINTER])
+ return n
+
+ case ir.OCLOSURE:
+ n := n.(*ir.ClosureExpr)
+ typecheckclosure(n, top)
+ if n.Type() == nil {
+ return n
+ }
+ return n
+
+ case ir.OITAB:
+ n := n.(*ir.UnaryExpr)
+ n.X = Expr(n.X)
+ t := n.X.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+ if !t.IsInterface() {
+ base.Fatalf("OITAB of %v", t)
+ }
+ n.SetType(types.NewPtr(types.Types[types.TUINTPTR]))
+ return n
+
+ case ir.OIDATA:
+ // Whoever creates the OIDATA node must know a priori the concrete type at that moment,
+ // usually by just having checked the OITAB.
+ n := n.(*ir.UnaryExpr)
+ base.Fatalf("cannot typecheck interface data %v", n)
+ panic("unreachable")
+
+ case ir.OSPTR:
+ n := n.(*ir.UnaryExpr)
+ n.X = Expr(n.X)
+ t := n.X.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+ if !t.IsSlice() && !t.IsString() {
+ base.Fatalf("OSPTR of %v", t)
+ }
+ if t.IsString() {
+ n.SetType(types.NewPtr(types.Types[types.TUINT8]))
+ } else {
+ n.SetType(types.NewPtr(t.Elem()))
+ }
+ return n
+
+ case ir.OCLOSUREREAD:
+ return n
+
+ case ir.OCFUNC:
+ n := n.(*ir.UnaryExpr)
+ n.X = Expr(n.X)
+ n.SetType(types.Types[types.TUINTPTR])
+ return n
+
+ case ir.OCONVNOP:
+ n := n.(*ir.ConvExpr)
+ n.X = Expr(n.X)
+ return n
+
+ // statements
+ case ir.OAS:
+ n := n.(*ir.AssignStmt)
+ typecheckas(n)
+
+ // Code that creates temps does not bother to set defn, so do it here.
+ if n.X.Op() == ir.ONAME && ir.IsAutoTmp(n.X) {
+ n.X.Name().Defn = n
+ }
+ return n
+
+ case ir.OAS2:
+ typecheckas2(n.(*ir.AssignListStmt))
+ return n
+
+ case ir.OBREAK,
+ ir.OCONTINUE,
+ ir.ODCL,
+ ir.OGOTO,
+ ir.OFALL,
+ ir.OVARKILL,
+ ir.OVARLIVE:
+ return n
+
+ case ir.OBLOCK:
+ n := n.(*ir.BlockStmt)
+ Stmts(n.List)
+ return n
+
+ case ir.OLABEL:
+ decldepth++
+ if n.Sym().IsBlank() {
+ // Empty identifier is valid but useless.
+ // Eliminate now to simplify life later.
+ // See issues 7538, 11589, 11593.
+ n = ir.NewBlockStmt(n.Pos(), nil)
+ }
+ return n
+
+ case ir.ODEFER, ir.OGO:
+ n := n.(*ir.GoDeferStmt)
+ n.Call = check(n.Call, ctxStmt|ctxExpr)
+ if !n.Call.Diag() {
+ checkdefergo(n)
+ }
+ return n
+
+ case ir.OFOR, ir.OFORUNTIL:
+ n := n.(*ir.ForStmt)
+ Stmts(n.Init())
+ decldepth++
+ n.Cond = Expr(n.Cond)
+ n.Cond = DefaultLit(n.Cond, nil)
+ if n.Cond != nil {
+ t := n.Cond.Type()
+ if t != nil && !t.IsBoolean() {
+ base.Errorf("non-bool %L used as for condition", n.Cond)
+ }
+ }
+ n.Post = Stmt(n.Post)
+ if n.Op() == ir.OFORUNTIL {
+ Stmts(n.Late)
+ }
+ Stmts(n.Body)
+ decldepth--
+ return n
+
+ case ir.OIF:
+ n := n.(*ir.IfStmt)
+ Stmts(n.Init())
+ n.Cond = Expr(n.Cond)
+ n.Cond = DefaultLit(n.Cond, nil)
+ if n.Cond != nil {
+ t := n.Cond.Type()
+ if t != nil && !t.IsBoolean() {
+ base.Errorf("non-bool %L used as if condition", n.Cond)
+ }
+ }
+ Stmts(n.Body)
+ Stmts(n.Else)
+ return n
+
+ case ir.ORETURN:
+ n := n.(*ir.ReturnStmt)
+ typecheckargs(n)
+ if ir.CurFunc == nil {
+ base.Errorf("return outside function")
+ n.SetType(nil)
+ return n
+ }
+
+ if ir.HasNamedResults(ir.CurFunc) && len(n.Results) == 0 {
+ return n
+ }
+ typecheckaste(ir.ORETURN, nil, false, ir.CurFunc.Type().Results(), n.Results, func() string { return "return argument" })
+ return n
+
+ case ir.ORETJMP:
+ n := n.(*ir.BranchStmt)
+ return n
+
+ case ir.OSELECT:
+ typecheckselect(n.(*ir.SelectStmt))
+ return n
+
+ case ir.OSWITCH:
+ typecheckswitch(n.(*ir.SwitchStmt))
+ return n
+
+ case ir.ORANGE:
+ typecheckrange(n.(*ir.RangeStmt))
+ return n
+
+ case ir.OTYPESW:
+ n := n.(*ir.TypeSwitchGuard)
+ base.Errorf("use of .(type) outside type switch")
+ n.SetType(nil)
+ return n
+
+ case ir.ODCLFUNC:
+ typecheckfunc(n.(*ir.Func))
+ return n
+
+ case ir.ODCLCONST:
+ n := n.(*ir.Decl)
+ n.X = Expr(n.X)
+ return n
+
+ case ir.ODCLTYPE:
+ n := n.(*ir.Decl)
+ n.X = check(n.X, ctxType)
+ types.CheckSize(n.X.Type())
+ return n
+ }
+
+ // No return n here!
+ // Individual cases can type-assert n, introducing a new one.
+ // Each must execute its own return n.
+}
+
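+// typecheckargs type-checks the arguments of a call or the results of a
+// return statement. A single multi-value call f(g()) is rewritten into
+// "t1, t2, ... := g(); f(t1, t2, ...)" using temporaries.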
+func typecheckargs(n ir.Node) {
+ var list []ir.Node
+ switch n := n.(type) {
+ default:
+ base.Fatalf("typecheckargs %+v", n.Op())
+ case *ir.CallExpr:
+ list = n.Args
+ if n.IsDDD {
+ Exprs(list)
+ return
+ }
+ case *ir.ReturnStmt:
+ list = n.Results
+ }
+ if len(list) != 1 {
+ Exprs(list)
+ return
+ }
+
+ typecheckslice(list, ctxExpr|ctxMultiOK)
+ t := list[0].Type()
+ if t == nil || !t.IsFuncArgStruct() {
+ return
+ }
+
+ // Rewrite f(g()) into t1, t2, ... = g(); f(t1, t2, ...).
+
+ // Save n as n.Orig for fmt.go.
+ if ir.Orig(n) == n {
+ n.(ir.OrigNode).SetOrig(ir.SepCopy(n))
+ }
+
+ as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
+ as.Rhs.Append(list...)
+
+ // If we're outside of function context, then this call will
+ // be executed during the generated init function. However,
+ // init.go hasn't yet created it. Instead, associate the
+ // temporary variables with initTodo for now, and init.go
+ // will reassociate them later when it's appropriate.
+ static := ir.CurFunc == nil
+ if static {
+ ir.CurFunc = InitTodoFunc
+ }
+ list = nil
+ for _, f := range t.FieldSlice() {
+ t := Temp(f.Type)
+ as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, t))
+ as.Lhs.Append(t)
+ list = append(list, t)
+ }
+ if static {
+ ir.CurFunc = nil
+ }
+
+ switch n := n.(type) {
+ case *ir.CallExpr:
+ n.Args.Set(list)
+ case *ir.ReturnStmt:
+ n.Results.Set(list)
+ }
+
+ n.PtrInit().Append(Stmt(as))
+}
+
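+// checksliceindex reports whether r is a valid index for the sliced value l;
+// tp is the array type when slicing an array or pointer to array, and nil otherwise.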
+func checksliceindex(l ir.Node, r ir.Node, tp *types.Type) bool {
+ t := r.Type()
+ if t == nil {
+ return false
+ }
+ if !t.IsInteger() {
+ base.Errorf("invalid slice index %v (type %v)", r, t)
+ return false
+ }
+
+ if r.Op() == ir.OLITERAL {
+ x := r.Val()
+ if constant.Sign(x) < 0 {
+ base.Errorf("invalid slice index %v (index must be non-negative)", r)
+ return false
+ } else if tp != nil && tp.NumElem() >= 0 && constant.Compare(x, token.GTR, constant.MakeInt64(tp.NumElem())) {
+ base.Errorf("invalid slice index %v (out of bounds for %d-element array)", r, tp.NumElem())
+ return false
+ } else if ir.IsConst(l, constant.String) && constant.Compare(x, token.GTR, constant.MakeInt64(int64(len(ir.StringVal(l))))) {
+ base.Errorf("invalid slice index %v (out of bounds for %d-byte string)", r, len(ir.StringVal(l)))
+ return false
+ } else if ir.ConstOverflow(x, types.Types[types.TINT]) {
+ base.Errorf("invalid slice index %v (index too large)", r)
+ return false
+ }
+ }
+
+ return true
+}
+
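+// checksliceconst reports whether the constant slice bounds lo and hi are
+// ordered (lo <= hi), reporting an error if they are not.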
+func checksliceconst(lo ir.Node, hi ir.Node) bool {
+ if lo != nil && hi != nil && lo.Op() == ir.OLITERAL && hi.Op() == ir.OLITERAL && constant.Compare(lo.Val(), token.GTR, hi.Val()) {
+ base.Errorf("invalid slice index: %v > %v", lo, hi)
+ return false
+ }
+
+ return true
+}
+
+func checkdefergo(n *ir.GoDeferStmt) {
+ what := "defer"
+ if n.Op() == ir.OGO {
+ what = "go"
+ }
+
+ switch n.Call.Op() {
+ // ok
+ case ir.OCALLINTER,
+ ir.OCALLMETH,
+ ir.OCALLFUNC,
+ ir.OCLOSE,
+ ir.OCOPY,
+ ir.ODELETE,
+ ir.OPANIC,
+ ir.OPRINT,
+ ir.OPRINTN,
+ ir.ORECOVER:
+ return
+
+ case ir.OAPPEND,
+ ir.OCAP,
+ ir.OCOMPLEX,
+ ir.OIMAG,
+ ir.OLEN,
+ ir.OMAKE,
+ ir.OMAKESLICE,
+ ir.OMAKECHAN,
+ ir.OMAKEMAP,
+ ir.ONEW,
+ ir.OREAL,
+ ir.OLITERAL: // conversion or unsafe.Alignof, Offsetof, Sizeof
+ if orig := ir.Orig(n.Call); orig.Op() == ir.OCONV {
+ break
+ }
+ base.ErrorfAt(n.Pos(), "%s discards result of %v", what, n.Call)
+ return
+ }
+
+ // The type is broken or missing, most likely a method call on a broken type.
+ // We will warn about the broken type elsewhere; no need to emit a potentially confusing error here.
+ if n.Call.Type() == nil || n.Call.Type().Broke() {
+ return
+ }
+
+ if !n.Diag() {
+ // The syntax made sure it was a call, so this must be
+ // a conversion.
+ n.SetDiag(true)
+ base.ErrorfAt(n.Pos(), "%s requires function call, not conversion", what)
+ }
+}
+
+// The result of implicitstar MUST be assigned back to n, e.g.
+// n.Left = implicitstar(n.Left)
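+// For example, given p of type *[10]int, p[i] is rewritten to (*p)[i].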
+func implicitstar(n ir.Node) ir.Node {
+ // insert implicit * if needed for fixed array
+ t := n.Type()
+ if t == nil || !t.IsPtr() {
+ return n
+ }
+ t = t.Elem()
+ if t == nil {
+ return n
+ }
+ if !t.IsArray() {
+ return n
+ }
+ star := ir.NewStarExpr(base.Pos, n)
+ star.SetImplicit(true)
+ return Expr(star)
+}
+
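+// needOneArg checks that the call n has exactly one argument and returns it;
+// f and args format the description of n used in error messages.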
+func needOneArg(n *ir.CallExpr, f string, args ...interface{}) (ir.Node, bool) {
+ if len(n.Args) == 0 {
+ p := fmt.Sprintf(f, args...)
+ base.Errorf("missing argument to %s: %v", p, n)
+ return nil, false
+ }
+
+ if len(n.Args) > 1 {
+ p := fmt.Sprintf(f, args...)
+ base.Errorf("too many arguments to %s: %v", p, n)
+ return n.Args[0], false
+ }
+
+ return n.Args[0], true
+}
+
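+// needTwoArgs checks that the call n has exactly two arguments and returns them.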
+func needTwoArgs(n *ir.CallExpr) (ir.Node, ir.Node, bool) {
+ if len(n.Args) != 2 {
+ if len(n.Args) < 2 {
+ base.Errorf("not enough arguments in call to %v", n)
+ } else {
+ base.Errorf("too many arguments in call to %v", n)
+ }
+ return nil, nil, false
+ }
+ return n.Args[0], n.Args[1], true
+}
+
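+// lookdot1 looks up the field or method s among the fields fs of type t.
+// dostrcmp 0 requires an exact symbol match, 1 matches by name only
+// (used to diagnose unexported names), and 2 also matches case-insensitively.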
+func lookdot1(errnode ir.Node, s *types.Sym, t *types.Type, fs *types.Fields, dostrcmp int) *types.Field {
+ var r *types.Field
+ for _, f := range fs.Slice() {
+ if dostrcmp != 0 && f.Sym.Name == s.Name {
+ return f
+ }
+ if dostrcmp == 2 && strings.EqualFold(f.Sym.Name, s.Name) {
+ return f
+ }
+ if f.Sym != s {
+ continue
+ }
+ if r != nil {
+ if errnode != nil {
+ base.Errorf("ambiguous selector %v", errnode)
+ } else if t.IsPtr() {
+ base.Errorf("ambiguous selector (%v).%v", t, s)
+ } else {
+ base.Errorf("ambiguous selector %v.%v", t, s)
+ }
+ break
+ }
+
+ r = f
+ }
+
+ return r
+}
+
+// typecheckMethodExpr checks selector expressions (ODOT) where the
+// base expression is a type expression (OTYPE).
+func typecheckMethodExpr(n *ir.SelectorExpr) (res ir.Node) {
+ if base.EnableTrace && base.Flag.LowerT {
+ defer tracePrint("typecheckMethodExpr", n)(&res)
+ }
+
+ t := n.X.Type()
+
+ // Compute the method set for t.
+ var ms *types.Fields
+ if t.IsInterface() {
+ ms = t.Fields()
+ } else {
+ mt := types.ReceiverBaseType(t)
+ if mt == nil {
+ base.Errorf("%v undefined (type %v has no method %v)", n, t, n.Sel)
+ n.SetType(nil)
+ return n
+ }
+ CalcMethods(mt)
+ ms = mt.AllMethods()
+
+ // The method expression T.m requires a wrapper when T
+ // is different from m's declared receiver type. We
+ // normally generate these wrappers while writing out
+ // runtime type descriptors, which is always done for
+ // types declared at package scope. However, we need
+ // to make sure to generate wrappers for anonymous
+ // receiver types too.
+ if mt.Sym() == nil {
+ NeedRuntimeType(t)
+ }
+ }
+
+ s := n.Sel
+ m := lookdot1(n, s, t, ms, 0)
+ if m == nil {
+ if lookdot1(n, s, t, ms, 1) != nil {
+ base.Errorf("%v undefined (cannot refer to unexported method %v)", n, s)
+ } else if _, ambig := dotpath(s, t, nil, false); ambig {
+ base.Errorf("%v undefined (ambiguous selector)", n) // method or field
+ } else {
+ base.Errorf("%v undefined (type %v has no method %v)", n, t, s)
+ }
+ n.SetType(nil)
+ return n
+ }
+
+ if !types.IsMethodApplicable(t, m) {
+ base.Errorf("invalid method expression %v (needs pointer receiver: (*%v).%S)", n, t, s)
+ n.SetType(nil)
+ return n
+ }
+
+ me := ir.NewMethodExpr(n.Pos(), n.X.Type(), m)
+ me.SetType(NewMethodType(m.Type, n.X.Type()))
+ f := NewName(ir.MethodSym(t, m.Sym))
+ f.Class_ = ir.PFUNC
+ f.SetType(me.Type())
+ me.FuncName_ = f
+
+ // Issue 25065. Make sure that we emit the symbol for a local method.
+ if base.Ctxt.Flag_dynlink && !inimport && (t.Sym() == nil || t.Sym().Pkg == types.LocalPkg) {
+ NeedFuncSym(me.FuncName_.Sym())
+ }
+
+ return me
+}
+
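+// derefall returns the type at the end of t's chain of pointer types.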
+func derefall(t *types.Type) *types.Type {
+ for t != nil && t.IsPtr() {
+ t = t.Elem()
+ }
+ return t
+}
+
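+// lookdot resolves the selector n against type t, setting n's Op, Type,
+// Offset, and Selection as appropriate, and may insert implicit dereferences
+// or an address-of on n.X; dostrcmp is as in lookdot1. It returns the
+// matched field or method, or nil if the lookup fails.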
+func lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field {
+ s := n.Sel
+
+ types.CalcSize(t)
+ var f1 *types.Field
+ if t.IsStruct() || t.IsInterface() {
+ f1 = lookdot1(n, s, t, t.Fields(), dostrcmp)
+ }
+
+ var f2 *types.Field
+ if n.X.Type() == t || n.X.Type().Sym() == nil {
+ mt := types.ReceiverBaseType(t)
+ if mt != nil {
+ f2 = lookdot1(n, s, mt, mt.Methods(), dostrcmp)
+ }
+ }
+
+ if f1 != nil {
+ if dostrcmp > 1 || f1.Broke() {
+ // Already in the process of diagnosing an error.
+ return f1
+ }
+ if f2 != nil {
+ base.Errorf("%v is both field and method", n.Sel)
+ }
+ if f1.Offset == types.BADWIDTH {
+ base.Fatalf("lookdot badwidth %v %p", f1, f1)
+ }
+ n.Offset = f1.Offset
+ n.SetType(f1.Type)
+ if t.IsInterface() {
+ if n.X.Type().IsPtr() {
+ star := ir.NewStarExpr(base.Pos, n.X)
+ star.SetImplicit(true)
+ n.X = Expr(star)
+ }
+
+ n.SetOp(ir.ODOTINTER)
+ }
+ n.Selection = f1
+ return f1
+ }
+
+ if f2 != nil {
+ if dostrcmp > 1 {
+ // Already in the process of diagnosing an error.
+ return f2
+ }
+ tt := n.X.Type()
+ types.CalcSize(tt)
+ rcvr := f2.Type.Recv().Type
+ if !types.Identical(rcvr, tt) {
+ if rcvr.IsPtr() && types.Identical(rcvr.Elem(), tt) {
+ checklvalue(n.X, "call pointer method on")
+ addr := NodAddr(n.X)
+ addr.SetImplicit(true)
+ n.X = check(addr, ctxType|ctxExpr)
+ } else if tt.IsPtr() && (!rcvr.IsPtr() || rcvr.IsPtr() && rcvr.Elem().NotInHeap()) && types.Identical(tt.Elem(), rcvr) {
+ star := ir.NewStarExpr(base.Pos, n.X)
+ star.SetImplicit(true)
+ n.X = check(star, ctxType|ctxExpr)
+ } else if tt.IsPtr() && tt.Elem().IsPtr() && types.Identical(derefall(tt), derefall(rcvr)) {
+ base.Errorf("calling method %v with receiver %L requires explicit dereference", n.Sel, n.X)
+ for tt.IsPtr() {
+ // Stop one level early for method with pointer receiver.
+ if rcvr.IsPtr() && !tt.Elem().IsPtr() {
+ break
+ }
+ star := ir.NewStarExpr(base.Pos, n.X)
+ star.SetImplicit(true)
+ n.X = check(star, ctxType|ctxExpr)
+ tt = tt.Elem()
+ }
+ } else {
+ base.Fatalf("method mismatch: %v for %v", rcvr, tt)
+ }
+ }
+
+ implicit, ll := n.Implicit(), n.X
+ for ll != nil && (ll.Op() == ir.ODOT || ll.Op() == ir.ODOTPTR || ll.Op() == ir.ODEREF) {
+ switch l := ll.(type) {
+ case *ir.SelectorExpr:
+ implicit, ll = l.Implicit(), l.X
+ case *ir.StarExpr:
+ implicit, ll = l.Implicit(), l.X
+ }
+ }
+ if implicit && ll.Type().IsPtr() && ll.Type().Sym() != nil && ll.Type().Sym().Def != nil && ir.AsNode(ll.Type().Sym().Def).Op() == ir.OTYPE {
+ // It is invalid to automatically dereference a named pointer type when selecting a method.
+ // Make n.X == ll to clarify error message.
+ n.X = ll
+ return nil
+ }
+
+ n.Sel = ir.MethodSym(n.X.Type(), f2.Sym)
+ n.Offset = f2.Offset
+ n.SetType(f2.Type)
+ n.SetOp(ir.ODOTMETH)
+ n.Selection = f2
+
+ return f2
+ }
+
+ return nil
+}
+
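+// nokeys reports whether the list l contains no key:value entries.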
+func nokeys(l ir.Nodes) bool {
+ for _, n := range l {
+ if n.Op() == ir.OKEY || n.Op() == ir.OSTRUCTKEY {
+ return false
+ }
+ }
+ return true
+}
+
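+// hasddd reports whether t's parameter list contains a ... parameter.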
+func hasddd(t *types.Type) bool {
+ for _, tl := range t.Fields().Slice() {
+ if tl.IsDDD() {
+ return true
+ }
+ }
+
+ return false
+}
+
+// typecheckaste type-checks the assignment of the expression list nl to the
+// parameter/result type list tstruct: type list = expression list.
+func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl ir.Nodes, desc func() string) {
+ var t *types.Type
+ var i int
+
+ lno := base.Pos
+ defer func() { base.Pos = lno }()
+
+ if tstruct.Broke() {
+ return
+ }
+
+ var n ir.Node
+ if len(nl) == 1 {
+ n = nl[0]
+ }
+
+ n1 := tstruct.NumFields()
+ n2 := len(nl)
+ if !hasddd(tstruct) {
+ if n2 > n1 {
+ goto toomany
+ }
+ if n2 < n1 {
+ goto notenough
+ }
+ } else {
+ if !isddd {
+ if n2 < n1-1 {
+ goto notenough
+ }
+ } else {
+ if n2 > n1 {
+ goto toomany
+ }
+ if n2 < n1 {
+ goto notenough
+ }
+ }
+ }
+
+ i = 0
+ for _, tl := range tstruct.Fields().Slice() {
+ t = tl.Type
+ if tl.IsDDD() {
+ if isddd {
+ if i >= len(nl) {
+ goto notenough
+ }
+ if len(nl)-i > 1 {
+ goto toomany
+ }
+ n = nl[i]
+ ir.SetPos(n)
+ if n.Type() != nil {
+ nl[i] = assignconvfn(n, t, desc)
+ }
+ return
+ }
+
+ // TODO(mdempsky): Make into ... call with implicit slice.
+ for ; i < len(nl); i++ {
+ n = nl[i]
+ ir.SetPos(n)
+ if n.Type() != nil {
+ nl[i] = assignconvfn(n, t.Elem(), desc)
+ }
+ }
+ return
+ }
+
+ if i >= len(nl) {
+ goto notenough
+ }
+ n = nl[i]
+ ir.SetPos(n)
+ if n.Type() != nil {
+ nl[i] = assignconvfn(n, t, desc)
+ }
+ i++
+ }
+
+ if i < len(nl) {
+ goto toomany
+ }
+ if isddd {
+ if call != nil {
+ base.Errorf("invalid use of ... in call to %v", call)
+ } else {
+ base.Errorf("invalid use of ... in %v", op)
+ }
+ }
+ return
+
+notenough:
+ if n == nil || (!n.Diag() && n.Type() != nil) {
+ details := errorDetails(nl, tstruct, isddd)
+ if call != nil {
+ // call is the expression being called, not the overall call.
+ // Method expressions have the form T.M, and the compiler has
+ // rewritten those to OMETHEXPR nodes, which still carry the receiver type T.
+ if call.Op() == ir.OMETHEXPR {
+ call := call.(*ir.MethodExpr)
+ base.Errorf("not enough arguments in call to method expression %v%s", call, details)
+ } else {
+ base.Errorf("not enough arguments in call to %v%s", call, details)
+ }
+ } else {
+ base.Errorf("not enough arguments to %v%s", op, details)
+ }
+ if n != nil {
+ n.SetDiag(true)
+ }
+ }
+ return
+
+toomany:
+ details := errorDetails(nl, tstruct, isddd)
+ if call != nil {
+ base.Errorf("too many arguments in call to %v%s", call, details)
+ } else {
+ base.Errorf("too many arguments to %v%s", op, details)
+ }
+}
+
+func errorDetails(nl ir.Nodes, tstruct *types.Type, isddd bool) string {
+ // If we don't know any type at the call site, suppress the signature
+ // details in the message. See Issue https://golang.org/issues/19012.
+ if tstruct == nil {
+ return ""
+ }
+ // If any node has an unknown type, suppress it as well
+ for _, n := range nl {
+ if n.Type() == nil {
+ return ""
+ }
+ }
+ return fmt.Sprintf("\n\thave %s\n\twant %v", fmtSignature(nl, isddd), tstruct)
+}
+
+// sigrepr returns a type's representation to the outside world,
+// as used in string representations of call and return signatures,
+// e.g. in error messages about wrong arguments to return.
+func sigrepr(t *types.Type, isddd bool) string {
+ switch t {
+ case types.UntypedString:
+ return "string"
+ case types.UntypedBool:
+ return "bool"
+ }
+
+ if t.Kind() == types.TIDEAL {
+ // "untyped number" is not commonly used
+ // outside of the compiler, so let's use "number".
+ // TODO(mdempsky): Revisit this.
+ return "number"
+ }
+
+ // Turn []T... argument to ...T for clearer error message.
+ if isddd {
+ if !t.IsSlice() {
+ base.Fatalf("bad type for ... argument: %v", t)
+ }
+ return "..." + t.Elem().String()
+ }
+ return t.String()
+}
+
+// fmtSignature returns the signature of the types at the call or return.
+func fmtSignature(nl ir.Nodes, isddd bool) string {
+ if len(nl) < 1 {
+ return "()"
+ }
+
+ var typeStrings []string
+ for i, n := range nl {
+ isdddArg := isddd && i == len(nl)-1
+ typeStrings = append(typeStrings, sigrepr(n.Type(), isdddArg))
+ }
+
+ return fmt.Sprintf("(%s)", strings.Join(typeStrings, ", "))
+}
+
+// Composite-literal type checking.
+
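+// fielddup reports an error if the field name has already been recorded
+// in hash, recording it otherwise.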
+func fielddup(name string, hash map[string]bool) {
+ if hash[name] {
+ base.Errorf("duplicate field name in struct literal: %s", name)
+ return
+ }
+ hash[name] = true
+}
+
+// iscomptype reports whether type t is a composite literal type.
+func iscomptype(t *types.Type) bool {
+ switch t.Kind() {
+ case types.TARRAY, types.TSLICE, types.TSTRUCT, types.TMAP:
+ return true
+ default:
+ return false
+ }
+}
+
+// pushtype adds elided type information for composite literals if
+// appropriate, and returns the resulting expression.
+func pushtype(nn ir.Node, t *types.Type) ir.Node {
+ if nn == nil || nn.Op() != ir.OCOMPLIT {
+ return nn
+ }
+ n := nn.(*ir.CompLitExpr)
+ if n.Ntype != nil {
+ return n
+ }
+
+ switch {
+ case iscomptype(t):
+ // For T, return T{...}.
+ n.Ntype = ir.TypeNode(t)
+
+ case t.IsPtr() && iscomptype(t.Elem()):
+ // For *T, return &T{...}.
+ n.Ntype = ir.TypeNode(t.Elem())
+
+ addr := NodAddrAt(n.Pos(), n)
+ addr.SetImplicit(true)
+ return addr
+ }
+ return n
+}
+
+// The result of typecheckcomplit MUST be assigned back to n, e.g.
+// n.Left = typecheckcomplit(n.Left)
+func typecheckcomplit(n *ir.CompLitExpr) (res ir.Node) {
+ if base.EnableTrace && base.Flag.LowerT {
+ defer tracePrint("typecheckcomplit", n)(&res)
+ }
+
+ lno := base.Pos
+ defer func() {
+ base.Pos = lno
+ }()
+
+ if n.Ntype == nil {
+ base.ErrorfAt(n.Pos(), "missing type in composite literal")
+ n.SetType(nil)
+ return n
+ }
+
+ // Save original node (including n.Ntype).
+ n.SetOrig(ir.Copy(n))
+
+ ir.SetPos(n.Ntype)
+
+ // Need to handle [...]T arrays specially.
+ if array, ok := n.Ntype.(*ir.ArrayType); ok && array.Elem != nil && array.Len == nil {
+ array.Elem = check(array.Elem, ctxType)
+ elemType := array.Elem.Type()
+ if elemType == nil {
+ n.SetType(nil)
+ return n
+ }
+ length := typecheckarraylit(elemType, -1, n.List, "array literal")
+ n.SetOp(ir.OARRAYLIT)
+ n.SetType(types.NewArray(elemType, length))
+ n.Ntype = nil
+ return n
+ }
+
+ n.Ntype = ir.Node(check(n.Ntype, ctxType)).(ir.Ntype)
+ t := n.Ntype.Type()
+ if t == nil {
+ n.SetType(nil)
+ return n
+ }
+ n.SetType(t)
+
+ switch t.Kind() {
+ default:
+ base.Errorf("invalid composite literal type %v", t)
+ n.SetType(nil)
+
+ case types.TARRAY:
+ typecheckarraylit(t.Elem(), t.NumElem(), n.List, "array literal")
+ n.SetOp(ir.OARRAYLIT)
+ n.Ntype = nil
+
+ case types.TSLICE:
+ length := typecheckarraylit(t.Elem(), -1, n.List, "slice literal")
+ n.SetOp(ir.OSLICELIT)
+ n.Ntype = nil
+ n.Len = length
+
+ case types.TMAP:
+ var cs constSet
+ for i3, l := range n.List {
+ ir.SetPos(l)
+ if l.Op() != ir.OKEY {
+ n.List[i3] = Expr(l)
+ base.Errorf("missing key in map literal")
+ continue
+ }
+ l := l.(*ir.KeyExpr)
+
+ r := l.Key
+ r = pushtype(r, t.Key())
+ r = Expr(r)
+ l.Key = AssignConv(r, t.Key(), "map key")
+ cs.add(base.Pos, l.Key, "key", "map literal")
+
+ r = l.Value
+ r = pushtype(r, t.Elem())
+ r = Expr(r)
+ l.Value = AssignConv(r, t.Elem(), "map value")
+ }
+
+ n.SetOp(ir.OMAPLIT)
+ n.Ntype = nil
+
+ case types.TSTRUCT:
+ // Need valid field offsets for the Offset assignments below.
+ types.CalcSize(t)
+
+ errored := false
+ if len(n.List) != 0 && nokeys(n.List) {
+ // simple list of variables
+ ls := n.List
+ for i, n1 := range ls {
+ ir.SetPos(n1)
+ n1 = Expr(n1)
+ ls[i] = n1
+ if i >= t.NumFields() {
+ if !errored {
+ base.Errorf("too many values in %v", n)
+ errored = true
+ }
+ continue
+ }
+
+ f := t.Field(i)
+ s := f.Sym
+ if s != nil && !types.IsExported(s.Name) && s.Pkg != types.LocalPkg {
+ base.Errorf("implicit assignment of unexported field '%s' in %v literal", s.Name, t)
+ }
+ // No pushtype allowed here. Must name fields for that.
+ n1 = AssignConv(n1, f.Type, "field value")
+ sk := ir.NewStructKeyExpr(base.Pos, f.Sym, n1)
+ sk.Offset = f.Offset
+ ls[i] = sk
+ }
+ if len(ls) < t.NumFields() {
+ base.Errorf("too few values in %v", n)
+ }
+ } else {
+ hash := make(map[string]bool)
+
+ // keyed list
+ ls := n.List
+ for i, l := range ls {
+ ir.SetPos(l)
+
+ if l.Op() == ir.OKEY {
+ kv := l.(*ir.KeyExpr)
+ key := kv.Key
+
+ // Sym might have resolved to a name in another top-level
+ // package, because of a dot import. Redirect to the correct
+ // sym before we do the lookup.
+ s := key.Sym()
+ if id, ok := key.(*ir.Ident); ok && DotImportRefs[id] != nil {
+ s = Lookup(s.Name)
+ }
+
+ // An OXDOT uses the Sym field to hold
+ // the field to the right of the dot,
+ // so s will be non-nil, but an OXDOT
+ // is never a valid struct literal key.
+ if s == nil || s.Pkg != types.LocalPkg || key.Op() == ir.OXDOT || s.IsBlank() {
+ base.Errorf("invalid field name %v in struct initializer", key)
+ continue
+ }
+
+ l = ir.NewStructKeyExpr(l.Pos(), s, kv.Value)
+ ls[i] = l
+ }
+
+ if l.Op() != ir.OSTRUCTKEY {
+ if !errored {
+ base.Errorf("mixture of field:value and value initializers")
+ errored = true
+ }
+ ls[i] = Expr(ls[i])
+ continue
+ }
+ l := l.(*ir.StructKeyExpr)
+
+ f := lookdot1(nil, l.Field, t, t.Fields(), 0)
+ if f == nil {
+ if ci := lookdot1(nil, l.Field, t, t.Fields(), 2); ci != nil { // Case-insensitive lookup.
+ if visible(ci.Sym) {
+ base.Errorf("unknown field '%v' in struct literal of type %v (but does have %v)", l.Field, t, ci.Sym)
+ } else if nonexported(l.Field) && l.Field.Name == ci.Sym.Name { // Ensure exactness before the suggestion.
+ base.Errorf("cannot refer to unexported field '%v' in struct literal of type %v", l.Field, t)
+ } else {
+ base.Errorf("unknown field '%v' in struct literal of type %v", l.Field, t)
+ }
+ continue
+ }
+ var f *types.Field
+ p, _ := dotpath(l.Field, t, &f, true)
+ if p == nil || f.IsMethod() {
+ base.Errorf("unknown field '%v' in struct literal of type %v", l.Field, t)
+ continue
+ }
+ // dotpath returns the parent embedded types in reverse order.
+ var ep []string
+ for ei := len(p) - 1; ei >= 0; ei-- {
+ ep = append(ep, p[ei].field.Sym.Name)
+ }
+ ep = append(ep, l.Field.Name)
+ base.Errorf("cannot use promoted field %v in struct literal of type %v", strings.Join(ep, "."), t)
+ continue
+ }
+ fielddup(f.Sym.Name, hash)
+ l.Offset = f.Offset
+
+ // No pushtype allowed here. Tried and rejected.
+ l.Value = Expr(l.Value)
+ l.Value = AssignConv(l.Value, f.Type, "field value")
+ }
+ }
+
+ n.SetOp(ir.OSTRUCTLIT)
+ n.Ntype = nil
+ }
+
+ return n
+}
+
+// typecheckarraylit type-checks a sequence of slice/array literal elements.
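+ // It returns the length implied by the elements, e.g. 3 for
+ // [...]string{"a", "b", "c"} and 10 for []int{9: 0}.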
+func typecheckarraylit(elemType *types.Type, bound int64, elts []ir.Node, ctx string) int64 {
+ // If there are key/value pairs, create a map to keep seen
+ // keys so we can check for duplicate indices.
+ var indices map[int64]bool
+ for _, elt := range elts {
+ if elt.Op() == ir.OKEY {
+ indices = make(map[int64]bool)
+ break
+ }
+ }
+
+ var key, length int64
+ for i, elt := range elts {
+ ir.SetPos(elt)
+ r := elts[i]
+ var kv *ir.KeyExpr
+ if elt.Op() == ir.OKEY {
+ elt := elt.(*ir.KeyExpr)
+ elt.Key = Expr(elt.Key)
+ key = IndexConst(elt.Key)
+ if key < 0 {
+ if !elt.Key.Diag() {
+ if key == -2 {
+ base.Errorf("index too large")
+ } else {
+ base.Errorf("index must be non-negative integer constant")
+ }
+ elt.Key.SetDiag(true)
+ }
+ key = -(1 << 30) // stay negative for a while
+ }
+ kv = elt
+ r = elt.Value
+ }
+
+ r = pushtype(r, elemType)
+ r = Expr(r)
+ r = AssignConv(r, elemType, ctx)
+ if kv != nil {
+ kv.Value = r
+ } else {
+ elts[i] = r
+ }
+
+ if key >= 0 {
+ if indices != nil {
+ if indices[key] {
+ base.Errorf("duplicate index in %s: %d", ctx, key)
+ } else {
+ indices[key] = true
+ }
+ }
+
+ if bound >= 0 && key >= bound {
+ base.Errorf("array index %d out of bounds [0:%d]", key, bound)
+ bound = -1
+ }
+ }
+
+ key++
+ if key > length {
+ length = key
+ }
+ }
+
+ return length
+}
+
+// visible reports whether sym is exported or locally defined.
+func visible(sym *types.Sym) bool {
+ return sym != nil && (types.IsExported(sym.Name) || sym.Pkg == types.LocalPkg)
+}
+
+// nonexported reports whether sym is an unexported field.
+func nonexported(sym *types.Sym) bool {
+ return sym != nil && !types.IsExported(sym.Name)
+}
+
+func checklvalue(n ir.Node, verb string) {
+ if !ir.IsAssignable(n) {
+ base.Errorf("cannot %s %v", verb, n)
+ }
+}
+
+func checkassign(stmt ir.Node, n ir.Node) {
+ // Variables declared in ORANGE are assigned on every iteration.
+ if !ir.DeclaredBy(n, stmt) || stmt.Op() == ir.ORANGE {
+ r := ir.OuterValue(n)
+ if r.Op() == ir.ONAME {
+ r := r.(*ir.Name)
+ r.Name().SetAssigned(true)
+ if r.Name().IsClosureVar() {
+ r.Name().Defn.Name().SetAssigned(true)
+ }
+ }
+ }
+
+ if ir.IsAssignable(n) {
+ return
+ }
+ if n.Op() == ir.OINDEXMAP {
+ n := n.(*ir.IndexExpr)
+ n.Assigned = true
+ return
+ }
+
+ // have already complained about n being invalid
+ if n.Type() == nil {
+ return
+ }
+
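+ // Report a specific reason where possible: a struct field in a
+ // map (m[k].f = v), a string index or slice (s[i] = c), or a
+ // declared constant.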
+ switch {
+ case n.Op() == ir.ODOT && n.(*ir.SelectorExpr).X.Op() == ir.OINDEXMAP:
+ base.Errorf("cannot assign to struct field %v in map", n)
+ case (n.Op() == ir.OINDEX && n.(*ir.IndexExpr).X.Type().IsString()) || n.Op() == ir.OSLICESTR:
+ base.Errorf("cannot assign to %v (strings are immutable)", n)
+ case n.Op() == ir.OLITERAL && n.Sym() != nil && ir.IsConstNode(n):
+ base.Errorf("cannot assign to %v (declared const)", n)
+ default:
+ base.Errorf("cannot assign to %v", n)
+ }
+ n.SetType(nil)
+}
+
+func checkassignlist(stmt ir.Node, l ir.Nodes) {
+ for _, n := range l {
+ checkassign(stmt, n)
+ }
+}
+
+// type check assignment.
+// if this assignment is the definition of a var on the left side,
+// fill in the var's type.
+func typecheckas(n *ir.AssignStmt) {
+ if base.EnableTrace && base.Flag.LowerT {
+ defer tracePrint("typecheckas", n)(nil)
+ }
+
+ // delicate little dance.
+ // the definition of n may refer to this assignment
+ // as its definition, in which case it will call typecheckas.
+ // in that case, do not call typecheck back, or it will cycle.
+ // if the variable has a type (ntype) then typechecking
+ // will not look at defn, so it is okay (and desirable,
+ // so that the conversion below happens).
+ n.X = Resolve(n.X)
+
+ if !ir.DeclaredBy(n.X, n) || n.X.Name().Ntype != nil {
+ n.X = AssignExpr(n.X)
+ }
+
+ // Use ctxMultiOK so we can emit an "N variables but M values" error
+ // to be consistent with typecheckas2 (#26616).
+ n.Y = check(n.Y, ctxExpr|ctxMultiOK)
+ checkassign(n, n.X)
+ if n.Y != nil && n.Y.Type() != nil {
+ if n.Y.Type().IsFuncArgStruct() {
+ base.Errorf("assignment mismatch: 1 variable but %v returns %d values", n.Y.(*ir.CallExpr).X, n.Y.Type().NumFields())
+ // Multi-value RHS isn't actually valid for OAS; nil out
+ // to indicate failed typechecking.
+ n.Y.SetType(nil)
+ } else if n.X.Type() != nil {
+ n.Y = AssignConv(n.Y, n.X.Type(), "assignment")
+ }
+ }
+
+ if ir.DeclaredBy(n.X, n) && n.X.Name().Ntype == nil {
+ n.Y = DefaultLit(n.Y, nil)
+ n.X.SetType(n.Y.Type())
+ }
+
+ // second half of dance.
+ // now that right is done, typecheck the left
+ // just to get it over with. see dance above.
+ n.SetTypecheck(1)
+
+ if n.X.Typecheck() == 0 {
+ n.X = AssignExpr(n.X)
+ }
+ if !ir.IsBlank(n.X) {
+ types.CheckSize(n.X.Type()) // ensure width is calculated for backend
+ }
+}
+
+func checkassignto(src *types.Type, dst ir.Node) {
+ if op, why := assignop(src, dst.Type()); op == ir.OXXX {
+ base.Errorf("cannot assign %v to %L in multiple assignment%s", src, dst, why)
+ return
+ }
+}
+
+func typecheckas2(n *ir.AssignListStmt) {
+ if base.EnableTrace && base.Flag.LowerT {
+ defer tracePrint("typecheckas2", n)(nil)
+ }
+
+ ls := n.Lhs
+ for i1, n1 := range ls {
+ // delicate little dance.
+ n1 = Resolve(n1)
+ ls[i1] = n1
+
+ if !ir.DeclaredBy(n1, n) || n1.Name().Ntype != nil {
+ ls[i1] = AssignExpr(ls[i1])
+ }
+ }
+
+ cl := len(n.Lhs)
+ cr := len(n.Rhs)
+ if cl > 1 && cr == 1 {
+ n.Rhs[0] = check(n.Rhs[0], ctxExpr|ctxMultiOK)
+ } else {
+ Exprs(n.Rhs)
+ }
+ checkassignlist(n, n.Lhs)
+
+ var l ir.Node
+ var r ir.Node
+ if cl == cr {
+ // easy
+ ls := n.Lhs
+ rs := n.Rhs
+ for il, nl := range ls {
+ nr := rs[il]
+ if nl.Type() != nil && nr.Type() != nil {
+ rs[il] = AssignConv(nr, nl.Type(), "assignment")
+ }
+ if ir.DeclaredBy(nl, n) && nl.Name().Ntype == nil {
+ rs[il] = DefaultLit(rs[il], nil)
+ nl.SetType(rs[il].Type())
+ }
+ }
+
+ goto out
+ }
+
+ l = n.Lhs[0]
+ r = n.Rhs[0]
+
+ // x,y,z = f()
+ if cr == 1 {
+ if r.Type() == nil {
+ goto out
+ }
+ switch r.Op() {
+ case ir.OCALLMETH, ir.OCALLINTER, ir.OCALLFUNC:
+ if !r.Type().IsFuncArgStruct() {
+ break
+ }
+ cr = r.Type().NumFields()
+ if cr != cl {
+ goto mismatch
+ }
+ r.(*ir.CallExpr).Use = ir.CallUseList
+ n.SetOp(ir.OAS2FUNC)
+ for i, l := range n.Lhs {
+ f := r.Type().Field(i)
+ if f.Type != nil && l.Type() != nil {
+ checkassignto(f.Type, l)
+ }
+ if ir.DeclaredBy(l, n) && l.Name().Ntype == nil {
+ l.SetType(f.Type)
+ }
+ }
+ goto out
+ }
+ }
+
+ // x, ok = y
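+ // (map index, channel receive, or type assertion, e.g. v, ok = m[k])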
+ if cl == 2 && cr == 1 {
+ if r.Type() == nil {
+ goto out
+ }
+ switch r.Op() {
+ case ir.OINDEXMAP, ir.ORECV, ir.ODOTTYPE:
+ switch r.Op() {
+ case ir.OINDEXMAP:
+ n.SetOp(ir.OAS2MAPR)
+ case ir.ORECV:
+ n.SetOp(ir.OAS2RECV)
+ case ir.ODOTTYPE:
+ r := r.(*ir.TypeAssertExpr)
+ n.SetOp(ir.OAS2DOTTYPE)
+ r.SetOp(ir.ODOTTYPE2)
+ }
+ if l.Type() != nil {
+ checkassignto(r.Type(), l)
+ }
+ if ir.DeclaredBy(l, n) {
+ l.SetType(r.Type())
+ }
+ l := n.Lhs[1]
+ if l.Type() != nil && !l.Type().IsBoolean() {
+ checkassignto(types.Types[types.TBOOL], l)
+ }
+ if ir.DeclaredBy(l, n) && l.Name().Ntype == nil {
+ l.SetType(types.Types[types.TBOOL])
+ }
+ goto out
+ }
+ }
+
+mismatch:
+ switch r.Op() {
+ default:
+ base.Errorf("assignment mismatch: %d variables but %d values", cl, cr)
+ case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
+ r := r.(*ir.CallExpr)
+ base.Errorf("assignment mismatch: %d variables but %v returns %d values", cl, r.X, cr)
+ }
+
+ // second half of dance
+out:
+ n.SetTypecheck(1)
+ ls = n.Lhs
+ for i1, n1 := range ls {
+ if n1.Typecheck() == 0 {
+ ls[i1] = AssignExpr(ls[i1])
+ }
+ }
+}
+
+// type check function definition
+ // To be called by check, not directly.
+ // (Call Func instead.)
+func typecheckfunc(n *ir.Func) {
+ if base.EnableTrace && base.Flag.LowerT {
+ defer tracePrint("typecheckfunc", n)(nil)
+ }
+
+ for _, ln := range n.Dcl {
+ if ln.Op() == ir.ONAME && (ln.Class_ == ir.PPARAM || ln.Class_ == ir.PPARAMOUT) {
+ ln.Decldepth = 1
+ }
+ }
+
+ n.Nname = AssignExpr(n.Nname).(*ir.Name)
+ t := n.Nname.Type()
+ if t == nil {
+ return
+ }
+ n.SetType(t)
+ rcvr := t.Recv()
+ if rcvr != nil && n.Shortname != nil {
+ m := addmethod(n, n.Shortname, t, true, n.Pragma&ir.Nointerface != 0)
+ if m == nil {
+ return
+ }
+
+ n.Nname.SetSym(ir.MethodSym(rcvr.Type, n.Shortname))
+ Declare(n.Nname, ir.PFUNC)
+ }
+
+ if base.Ctxt.Flag_dynlink && !inimport && n.Nname != nil {
+ NeedFuncSym(n.Sym())
+ }
+}
+
+// The result of stringtoruneslit MUST be assigned back to n, e.g.
+// n.Left = stringtoruneslit(n.Left)
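+ // It rewrites a constant conversion such as []rune("ab") into the
+ // equivalent composite literal []rune{0: 'a', 1: 'b'}.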
+func stringtoruneslit(n *ir.ConvExpr) ir.Node {
+ if n.X.Op() != ir.OLITERAL || n.X.Val().Kind() != constant.String {
+ base.Fatalf("stringtoarraylit %v", n)
+ }
+
+ var l []ir.Node
+ i := 0
+ for _, r := range ir.StringVal(n.X) {
+ l = append(l, ir.NewKeyExpr(base.Pos, ir.NewInt(int64(i)), ir.NewInt(int64(r))))
+ i++
+ }
+
+ nn := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(n.Type()).(ir.Ntype), nil)
+ nn.List.Set(l)
+ return Expr(nn)
+}
+
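+ // mapqueue records map types whose keys still need to be checked
+ // for comparability; CheckMapKeys examines them after typechecking
+ // completes.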
+var mapqueue []*ir.MapType
+
+func CheckMapKeys() {
+ for _, n := range mapqueue {
+ k := n.Type().MapType().Key
+ if !k.Broke() && !types.IsComparable(k) {
+ base.ErrorfAt(n.Pos(), "invalid map key type %v", k)
+ }
+ }
+ mapqueue = nil
+}
+
+func typecheckdeftype(n *ir.Name) {
+ if base.EnableTrace && base.Flag.LowerT {
+ defer tracePrint("typecheckdeftype", n)(nil)
+ }
+
+ t := types.NewNamed(n)
+ t.Vargen = n.Vargen
+ if n.Pragma()&ir.NotInHeap != 0 {
+ t.SetNotInHeap(true)
+ }
+
+ n.SetType(t)
+ n.SetTypecheck(1)
+ n.SetWalkdef(1)
+
+ types.DeferCheckSize()
+ errorsBefore := base.Errors()
+ n.Ntype = typecheckNtype(n.Ntype)
+ if underlying := n.Ntype.Type(); underlying != nil {
+ t.SetUnderlying(underlying)
+ } else {
+ n.SetDiag(true)
+ n.SetType(nil)
+ }
+ if t.Kind() == types.TFORW && base.Errors() > errorsBefore {
+ // Something went wrong during type-checking,
+ // but it was reported. Silence future errors.
+ t.SetBroke(true)
+ }
+ types.ResumeCheckSize()
+}
+
+func typecheckdef(n ir.Node) {
+ if base.EnableTrace && base.Flag.LowerT {
+ defer tracePrint("typecheckdef", n)(nil)
+ }
+
+ lno := ir.SetPos(n)
+
+ if n.Op() == ir.ONONAME {
+ if !n.Diag() {
+ n.SetDiag(true)
+
+ // Note: adderrorname looks for this string and
+ // adds context about the outer expression
+ base.ErrorfAt(base.Pos, "undefined: %v", n.Sym())
+ }
+ base.Pos = lno
+ return
+ }
+
+ if n.Walkdef() == 1 {
+ base.Pos = lno
+ return
+ }
+
+ typecheckdefstack = append(typecheckdefstack, n)
+ if n.Walkdef() == 2 {
+ base.FlushErrors()
+ fmt.Printf("typecheckdef loop:")
+ for i := len(typecheckdefstack) - 1; i >= 0; i-- {
+ n := typecheckdefstack[i]
+ fmt.Printf(" %v", n.Sym())
+ }
+ fmt.Printf("\n")
+ base.Fatalf("typecheckdef loop")
+ }
+
+ n.SetWalkdef(2)
+
+ if n.Type() != nil || n.Sym() == nil { // builtin or no name
+ goto ret
+ }
+
+ switch n.Op() {
+ default:
+ base.Fatalf("typecheckdef %v", n.Op())
+
+ case ir.OLITERAL:
+ if n.Name().Ntype != nil {
+ n.Name().Ntype = typecheckNtype(n.Name().Ntype)
+ n.SetType(n.Name().Ntype.Type())
+ n.Name().Ntype = nil
+ if n.Type() == nil {
+ n.SetDiag(true)
+ goto ret
+ }
+ }
+
+ e := n.Name().Defn
+ n.Name().Defn = nil
+ if e == nil {
+ ir.Dump("typecheckdef nil defn", n)
+ base.ErrorfAt(n.Pos(), "xxx")
+ }
+
+ e = Expr(e)
+ if e.Type() == nil {
+ goto ret
+ }
+ if !ir.IsConstNode(e) {
+ if !e.Diag() {
+ if e.Op() == ir.ONIL {
+ base.ErrorfAt(n.Pos(), "const initializer cannot be nil")
+ } else {
+ base.ErrorfAt(n.Pos(), "const initializer %v is not a constant", e)
+ }
+ e.SetDiag(true)
+ }
+ goto ret
+ }
+
+ t := n.Type()
+ if t != nil {
+ if !ir.OKForConst[t.Kind()] {
+ base.ErrorfAt(n.Pos(), "invalid constant type %v", t)
+ goto ret
+ }
+
+ if !e.Type().IsUntyped() && !types.Identical(t, e.Type()) {
+ base.ErrorfAt(n.Pos(), "cannot use %L as type %v in const initializer", e, t)
+ goto ret
+ }
+
+ e = convlit(e, t)
+ }
+
+ n.SetType(e.Type())
+ if n.Type() != nil {
+ n.SetVal(e.Val())
+ }
+
+ case ir.ONAME:
+ n := n.(*ir.Name)
+ if n.Name().Ntype != nil {
+ n.Name().Ntype = typecheckNtype(n.Name().Ntype)
+ n.SetType(n.Name().Ntype.Type())
+ if n.Type() == nil {
+ n.SetDiag(true)
+ goto ret
+ }
+ }
+
+ if n.Type() != nil {
+ break
+ }
+ if n.Name().Defn == nil {
+ if n.BuiltinOp != 0 { // like OPRINTN
+ break
+ }
+ if base.Errors() > 0 {
+ // Can have undefined variables in x := foo
+ // that make x have a nil Defn.
+ // If there are other errors anyway, don't
+ // bother adding to the noise.
+ break
+ }
+
+ base.Fatalf("var without type, init: %v", n.Sym())
+ }
+
+ if n.Name().Defn.Op() == ir.ONAME {
+ n.Name().Defn = Expr(n.Name().Defn)
+ n.SetType(n.Name().Defn.Type())
+ break
+ }
+
+ n.Name().Defn = Stmt(n.Name().Defn) // fills in n.Type
+
+ case ir.OTYPE:
+ n := n.(*ir.Name)
+ if n.Alias() {
+ // Type alias declaration: Simply use the rhs type - no need
+ // to create a new type.
+ // If we have a syntax error, name.Ntype may be nil.
+ if n.Ntype != nil {
+ n.Ntype = typecheckNtype(n.Ntype)
+ n.SetType(n.Ntype.Type())
+ if n.Type() == nil {
+ n.SetDiag(true)
+ goto ret
+ }
+ // For package-level type aliases, set n.Sym.Def so we can identify
+ // it as a type alias during export. See also #31959.
+ if n.Curfn == nil {
+ n.Sym().Def = n.Ntype
+ }
+ }
+ break
+ }
+
+ // regular type declaration
+ typecheckdeftype(n)
+ }
+
+ret:
+ if n.Op() != ir.OLITERAL && n.Type() != nil && n.Type().IsUntyped() {
+ base.Fatalf("got %v for %v", n.Type(), n)
+ }
+ last := len(typecheckdefstack) - 1
+ if typecheckdefstack[last] != n {
+ base.Fatalf("typecheckdefstack mismatch")
+ }
+ typecheckdefstack[last] = nil
+ typecheckdefstack = typecheckdefstack[:last]
+
+ base.Pos = lno
+ n.SetWalkdef(1)
+}
+
+func checkmake(t *types.Type, arg string, np *ir.Node) bool {
+ n := *np
+ if !n.Type().IsInteger() && n.Type().Kind() != types.TIDEAL {
+ base.Errorf("non-integer %s argument in make(%v) - %v", arg, t, n.Type())
+ return false
+ }
+
+ // Do range checks for constants before defaultlit
+ // to avoid redundant "constant NNN overflows int" errors.
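+ // e.g. make([]int, -1) and make([]int, 1<<100) are both rejected here.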
+ if n.Op() == ir.OLITERAL {
+ v := toint(n.Val())
+ if constant.Sign(v) < 0 {
+ base.Errorf("negative %s argument in make(%v)", arg, t)
+ return false
+ }
+ if ir.ConstOverflow(v, types.Types[types.TINT]) {
+ base.Errorf("%s argument too large in make(%v)", arg, t)
+ return false
+ }
+ }
+
+ // defaultlit is necessary for non-constants too: n might be 1.1<<k.
+ // TODO(gri) The length argument requirements for (array/slice) make
+ // are the same as for index expressions. Factor the code better;
+ // for instance, indexlit might be called here and incorporate some
+ // of the bounds checks done for make.
+ n = DefaultLit(n, types.Types[types.TINT])
+ *np = n
+
+ return true
+}
+
+// markBreak marks control statements containing break statements with SetHasBreak(true).
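+ // This lets isTermNode treat a loop, switch, or select with no
+ // reachable break as potentially terminating.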
+func markBreak(fn *ir.Func) {
+ var labels map[*types.Sym]ir.Node
+ var implicit ir.Node
+
+ var mark func(ir.Node) error
+ mark = func(n ir.Node) error {
+ switch n.Op() {
+ default:
+ ir.DoChildren(n, mark)
+
+ case ir.OBREAK:
+ n := n.(*ir.BranchStmt)
+ if n.Label == nil {
+ setHasBreak(implicit)
+ } else {
+ setHasBreak(labels[n.Label])
+ }
+
+ case ir.OFOR, ir.OFORUNTIL, ir.OSWITCH, ir.OSELECT, ir.ORANGE:
+ old := implicit
+ implicit = n
+ var sym *types.Sym
+ switch n := n.(type) {
+ case *ir.ForStmt:
+ sym = n.Label
+ case *ir.RangeStmt:
+ sym = n.Label
+ case *ir.SelectStmt:
+ sym = n.Label
+ case *ir.SwitchStmt:
+ sym = n.Label
+ }
+ if sym != nil {
+ if labels == nil {
+ // Map creation delayed until we need it - most functions don't.
+ labels = make(map[*types.Sym]ir.Node)
+ }
+ labels[sym] = n
+ }
+ ir.DoChildren(n, mark)
+ if sym != nil {
+ delete(labels, sym)
+ }
+ implicit = old
+ }
+ return nil
+ }
+
+ mark(fn)
+}
+
+func controlLabel(n ir.Node) *types.Sym {
+ switch n := n.(type) {
+ default:
+ base.Fatalf("controlLabel %+v", n.Op())
+ return nil
+ case *ir.ForStmt:
+ return n.Label
+ case *ir.RangeStmt:
+ return n.Label
+ case *ir.SelectStmt:
+ return n.Label
+ case *ir.SwitchStmt:
+ return n.Label
+ }
+}
+
+func setHasBreak(n ir.Node) {
+ switch n := n.(type) {
+ default:
+ base.Fatalf("setHasBreak %+v", n.Op())
+ case nil:
+ // ignore
+ case *ir.ForStmt:
+ n.HasBreak = true
+ case *ir.RangeStmt:
+ n.HasBreak = true
+ case *ir.SelectStmt:
+ n.HasBreak = true
+ case *ir.SwitchStmt:
+ n.HasBreak = true
+ }
+}
+
+// isTermNodes reports whether the Nodes list ends with a terminating statement.
+func isTermNodes(l ir.Nodes) bool {
+ s := l
+ c := len(s)
+ if c == 0 {
+ return false
+ }
+ return isTermNode(s[c-1])
+}
+
+// isTermNode reports whether the node n, the last one in a
+// statement list, is a terminating statement.
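+ // Examples: return, panic, goto, a "for" with no condition and no
+ // break, and an "if" whose branches both terminate.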
+func isTermNode(n ir.Node) bool {
+ switch n.Op() {
+ // NOTE: OLABEL is treated as a separate statement,
+ // not a separate prefix, so skipping to the last statement
+ // in the block handles the labeled statement case by
+ // skipping over the label. No case OLABEL here.
+
+ case ir.OBLOCK:
+ n := n.(*ir.BlockStmt)
+ return isTermNodes(n.List)
+
+ case ir.OGOTO, ir.ORETURN, ir.ORETJMP, ir.OPANIC, ir.OFALL:
+ return true
+
+ case ir.OFOR, ir.OFORUNTIL:
+ n := n.(*ir.ForStmt)
+ if n.Cond != nil {
+ return false
+ }
+ if n.HasBreak {
+ return false
+ }
+ return true
+
+ case ir.OIF:
+ n := n.(*ir.IfStmt)
+ return isTermNodes(n.Body) && isTermNodes(n.Else)
+
+ case ir.OSWITCH:
+ n := n.(*ir.SwitchStmt)
+ if n.HasBreak {
+ return false
+ }
+ def := false
+ for _, cas := range n.Cases {
+ cas := cas.(*ir.CaseStmt)
+ if !isTermNodes(cas.Body) {
+ return false
+ }
+ if len(cas.List) == 0 { // default
+ def = true
+ }
+ }
+ return def
+
+ case ir.OSELECT:
+ n := n.(*ir.SelectStmt)
+ if n.HasBreak {
+ return false
+ }
+ for _, cas := range n.Cases {
+ cas := cas.(*ir.CaseStmt)
+ if !isTermNodes(cas.Body) {
+ return false
+ }
+ }
+ return true
+ }
+
+ return false
+}
+
+// CheckReturn makes sure that fn terminates appropriately.
+func CheckReturn(fn *ir.Func) {
+ if fn.Type().NumResults() != 0 && len(fn.Body) != 0 {
+ markBreak(fn)
+ if !isTermNodes(fn.Body) {
+ base.ErrorfAt(fn.Endlineno, "missing return at end of function")
+ }
+ }
+}
+
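+ // deadcode trims obviously unreachable code from fn's body. If
+ // nothing executable remains (only constant-condition "if"
+ // statements with empty branches and "for false" loops), the
+ // body is replaced by a single empty block.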
+func deadcode(fn *ir.Func) {
+ deadcodeslice(&fn.Body)
+
+ if len(fn.Body) == 0 {
+ return
+ }
+
+ for _, n := range fn.Body {
+ if len(n.Init()) > 0 {
+ return
+ }
+ switch n.Op() {
+ case ir.OIF:
+ n := n.(*ir.IfStmt)
+ if !ir.IsConst(n.Cond, constant.Bool) || len(n.Body) > 0 || len(n.Else) > 0 {
+ return
+ }
+ case ir.OFOR:
+ n := n.(*ir.ForStmt)
+ if !ir.IsConst(n.Cond, constant.Bool) || ir.BoolVal(n.Cond) {
+ return
+ }
+ default:
+ return
+ }
+ }
+
+ fn.Body.Set([]ir.Node{ir.NewBlockStmt(base.Pos, nil)})
+}
+
+func deadcodeslice(nn *ir.Nodes) {
+ var lastLabel = -1
+ for i, n := range *nn {
+ if n != nil && n.Op() == ir.OLABEL {
+ lastLabel = i
+ }
+ }
+ for i, n := range *nn {
+ // cut is set to true when all nodes after the i'th position
+ // should be removed; in other words, it marks the whole tail
+ // of the slice as dead.
+ cut := false
+ if n == nil {
+ continue
+ }
+ if n.Op() == ir.OIF {
+ n := n.(*ir.IfStmt)
+ n.Cond = deadcodeexpr(n.Cond)
+ if ir.IsConst(n.Cond, constant.Bool) {
+ var body ir.Nodes
+ if ir.BoolVal(n.Cond) {
+ n.Else = ir.Nodes{}
+ body = n.Body
+ } else {
+ n.Body = ir.Nodes{}
+ body = n.Else
+ }
+ // If "then" or "else" branch ends with panic or return statement,
+ // it is safe to remove all statements after this node.
+ // isTermNode is not used, to avoid goto-related complications.
+ // We must be careful not to deadcode-remove labels, as they
+ // might be the target of a goto. See issue 28616.
+ if len(body) != 0 {
+ switch body[len(body)-1].Op() {
+ case ir.ORETURN, ir.ORETJMP, ir.OPANIC:
+ if i > lastLabel {
+ cut = true
+ }
+ }
+ }
+ }
+ }
+
+ deadcodeslice(n.PtrInit())
+ switch n.Op() {
+ case ir.OBLOCK:
+ n := n.(*ir.BlockStmt)
+ deadcodeslice(&n.List)
+ case ir.OCASE:
+ n := n.(*ir.CaseStmt)
+ deadcodeslice(&n.Body)
+ case ir.OFOR:
+ n := n.(*ir.ForStmt)
+ deadcodeslice(&n.Body)
+ case ir.OIF:
+ n := n.(*ir.IfStmt)
+ deadcodeslice(&n.Body)
+ deadcodeslice(&n.Else)
+ case ir.ORANGE:
+ n := n.(*ir.RangeStmt)
+ deadcodeslice(&n.Body)
+ case ir.OSELECT:
+ n := n.(*ir.SelectStmt)
+ deadcodeslice(&n.Cases)
+ case ir.OSWITCH:
+ n := n.(*ir.SwitchStmt)
+ deadcodeslice(&n.Cases)
+ }
+
+ if cut {
+ nn.Set((*nn)[:i+1])
+ break
+ }
+ }
+}
+
+func deadcodeexpr(n ir.Node) ir.Node {
+ // Perform dead-code elimination on short-circuited boolean
+ // expressions involving constants with the intent of
+ // producing a constant 'if' condition.
+ switch n.Op() {
+ case ir.OANDAND:
+ n := n.(*ir.LogicalExpr)
+ n.X = deadcodeexpr(n.X)
+ n.Y = deadcodeexpr(n.Y)
+ if ir.IsConst(n.X, constant.Bool) {
+ if ir.BoolVal(n.X) {
+ return n.Y // true && x => x
+ } else {
+ return n.X // false && x => false
+ }
+ }
+ case ir.OOROR:
+ n := n.(*ir.LogicalExpr)
+ n.X = deadcodeexpr(n.X)
+ n.Y = deadcodeexpr(n.Y)
+ if ir.IsConst(n.X, constant.Bool) {
+ if ir.BoolVal(n.X) {
+ return n.X // true || x => true
+ } else {
+ return n.Y // false || x => x
+ }
+ }
+ }
+ return n
+}
+
+// getIotaValue returns the current value for "iota",
+// or -1 if not within a ConstSpec.
+func getIotaValue() int64 {
+ if i := len(typecheckdefstack); i > 0 {
+ if x := typecheckdefstack[i-1]; x.Op() == ir.OLITERAL {
+ return x.(*ir.Name).Iota()
+ }
+ }
+
+ if ir.CurFunc != nil && ir.CurFunc.Iota >= 0 {
+ return ir.CurFunc.Iota
+ }
+
+ return -1
+}
+
+// curpkg returns the current package, based on Curfn.
+func curpkg() *types.Pkg {
+ fn := ir.CurFunc
+ if fn == nil {
+ // Initialization expressions for package-scope variables.
+ return types.LocalPkg
+ }
+ return fnpkg(fn.Nname)
+}
+
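+ // Conv converts node n to type t using the OCONV op
+ // and typechecks the result with ctxExpr.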
+func Conv(n ir.Node, t *types.Type) ir.Node {
+ if types.Identical(n.Type(), t) {
+ return n
+ }
+ n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
+ n.SetType(t)
+ n = Expr(n)
+ return n
+}
+
+// ConvNop converts node n to type t using the OCONVNOP op
+// and typechecks the result with ctxExpr.
+func ConvNop(n ir.Node, t *types.Type) ir.Node {
+ if types.Identical(n.Type(), t) {
+ return n
+ }
+ n = ir.NewConvExpr(base.Pos, ir.OCONVNOP, nil, n)
+ n.SetType(t)
+ n = Expr(n)
+ return n
+}
diff --git a/src/cmd/compile/internal/typecheck/universe.go b/src/cmd/compile/internal/typecheck/universe.go
new file mode 100644
index 0000000000..fc8e962e28
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/universe.go
@@ -0,0 +1,362 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typecheck
+
+import (
+ "go/constant"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+ "cmd/internal/src"
+)
+
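+ // okfor[op] lists the operand kinds each operator accepts;
+ // iscmp marks the comparison operators.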
+var (
+ okfor [ir.OEND][]bool
+ iscmp [ir.OEND]bool
+)
+
+var (
+ okforeq [types.NTYPE]bool
+ okforadd [types.NTYPE]bool
+ okforand [types.NTYPE]bool
+ okfornone [types.NTYPE]bool
+ okforbool [types.NTYPE]bool
+ okforcap [types.NTYPE]bool
+ okforlen [types.NTYPE]bool
+ okforarith [types.NTYPE]bool
+)
+
+var basicTypes = [...]struct {
+ name string
+ etype types.Kind
+}{
+ {"int8", types.TINT8},
+ {"int16", types.TINT16},
+ {"int32", types.TINT32},
+ {"int64", types.TINT64},
+ {"uint8", types.TUINT8},
+ {"uint16", types.TUINT16},
+ {"uint32", types.TUINT32},
+ {"uint64", types.TUINT64},
+ {"float32", types.TFLOAT32},
+ {"float64", types.TFLOAT64},
+ {"complex64", types.TCOMPLEX64},
+ {"complex128", types.TCOMPLEX128},
+ {"bool", types.TBOOL},
+ {"string", types.TSTRING},
+}
+
+var typedefs = [...]struct {
+ name string
+ etype types.Kind
+ sameas32 types.Kind
+ sameas64 types.Kind
+}{
+ {"int", types.TINT, types.TINT32, types.TINT64},
+ {"uint", types.TUINT, types.TUINT32, types.TUINT64},
+ {"uintptr", types.TUINTPTR, types.TUINT32, types.TUINT64},
+}
+
+var builtinFuncs = [...]struct {
+ name string
+ op ir.Op
+}{
+ {"append", ir.OAPPEND},
+ {"cap", ir.OCAP},
+ {"close", ir.OCLOSE},
+ {"complex", ir.OCOMPLEX},
+ {"copy", ir.OCOPY},
+ {"delete", ir.ODELETE},
+ {"imag", ir.OIMAG},
+ {"len", ir.OLEN},
+ {"make", ir.OMAKE},
+ {"new", ir.ONEW},
+ {"panic", ir.OPANIC},
+ {"print", ir.OPRINT},
+ {"println", ir.OPRINTN},
+ {"real", ir.OREAL},
+ {"recover", ir.ORECOVER},
+}
+
+var unsafeFuncs = [...]struct {
+ name string
+ op ir.Op
+}{
+ {"Alignof", ir.OALIGNOF},
+ {"Offsetof", ir.OOFFSETOF},
+ {"Sizeof", ir.OSIZEOF},
+}
+
+// initUniverse initializes the universe block.
+func initUniverse() {
+ if types.PtrSize == 0 {
+ base.Fatalf("typeinit before betypeinit")
+ }
+
+ types.SlicePtrOffset = 0
+ types.SliceLenOffset = types.Rnd(types.SlicePtrOffset+int64(types.PtrSize), int64(types.PtrSize))
+ types.SliceCapOffset = types.Rnd(types.SliceLenOffset+int64(types.PtrSize), int64(types.PtrSize))
+ types.SliceSize = types.Rnd(types.SliceCapOffset+int64(types.PtrSize), int64(types.PtrSize))
+
+ // string is same as slice w/o the cap
+ types.StringSize = types.Rnd(types.SliceLenOffset+int64(types.PtrSize), int64(types.PtrSize))
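+ // On a 64-bit target this yields SliceSize = 24 and StringSize = 16.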
+
+ for et := types.Kind(0); et < types.NTYPE; et++ {
+ types.SimType[et] = et
+ }
+
+ types.Types[types.TANY] = types.New(types.TANY)
+ types.Types[types.TINTER] = types.NewInterface(types.LocalPkg, nil)
+
+ defBasic := func(kind types.Kind, pkg *types.Pkg, name string) *types.Type {
+ sym := pkg.Lookup(name)
+ n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, sym)
+ t := types.NewBasic(kind, n)
+ n.SetType(t)
+ sym.Def = n
+ if kind != types.TANY {
+ types.CalcSize(t)
+ }
+ return t
+ }
+
+ for _, s := range &basicTypes {
+ types.Types[s.etype] = defBasic(s.etype, types.BuiltinPkg, s.name)
+ }
+
+ for _, s := range &typedefs {
+ sameas := s.sameas32
+ if types.PtrSize == 8 {
+ sameas = s.sameas64
+ }
+ types.SimType[s.etype] = sameas
+
+ types.Types[s.etype] = defBasic(s.etype, types.BuiltinPkg, s.name)
+ }
+
+ // We create separate byte and rune types for better error messages
+ // rather than just creating type alias *types.Sym's for the uint8 and
+ // int32 types. Hence, (bytetype|runetype).Sym.isAlias() is false.
+ // TODO(gri) Should we get rid of this special case (at the cost
+ // of less informative error messages involving bytes and runes)?
+ // (Alternatively, we could introduce an OTALIAS node representing
+ // type aliases, albeit at the cost of having to deal with it everywhere).
+ types.ByteType = defBasic(types.TUINT8, types.BuiltinPkg, "byte")
+ types.RuneType = defBasic(types.TINT32, types.BuiltinPkg, "rune")
+
+ // error type
+ s := types.BuiltinPkg.Lookup("error")
+ n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, s)
+ types.ErrorType = types.NewNamed(n)
+ types.ErrorType.SetUnderlying(makeErrorInterface())
+ n.SetType(types.ErrorType)
+ s.Def = n
+ types.CalcSize(types.ErrorType)
+
+ types.Types[types.TUNSAFEPTR] = defBasic(types.TUNSAFEPTR, ir.Pkgs.Unsafe, "Pointer")
+
+ // simple aliases
+ types.SimType[types.TMAP] = types.TPTR
+ types.SimType[types.TCHAN] = types.TPTR
+ types.SimType[types.TFUNC] = types.TPTR
+ types.SimType[types.TUNSAFEPTR] = types.TPTR
+
+ for _, s := range &builtinFuncs {
+ s2 := types.BuiltinPkg.Lookup(s.name)
+ def := NewName(s2)
+ def.BuiltinOp = s.op
+ s2.Def = def
+ }
+
+ for _, s := range &unsafeFuncs {
+ s2 := ir.Pkgs.Unsafe.Lookup(s.name)
+ def := NewName(s2)
+ def.BuiltinOp = s.op
+ s2.Def = def
+ }
+
+ s = types.BuiltinPkg.Lookup("true")
+ s.Def = ir.NewConstAt(src.NoXPos, s, types.UntypedBool, constant.MakeBool(true))
+
+ s = types.BuiltinPkg.Lookup("false")
+ s.Def = ir.NewConstAt(src.NoXPos, s, types.UntypedBool, constant.MakeBool(false))
+
+ s = Lookup("_")
+ types.BlankSym = s
+ s.Block = -100
+ s.Def = NewName(s)
+ types.Types[types.TBLANK] = types.New(types.TBLANK)
+ ir.AsNode(s.Def).SetType(types.Types[types.TBLANK])
+ ir.BlankNode = ir.AsNode(s.Def)
+ ir.BlankNode.SetTypecheck(1)
+
+ s = types.BuiltinPkg.Lookup("_")
+ s.Block = -100
+ s.Def = NewName(s)
+ types.Types[types.TBLANK] = types.New(types.TBLANK)
+ ir.AsNode(s.Def).SetType(types.Types[types.TBLANK])
+
+ types.Types[types.TNIL] = types.New(types.TNIL)
+ s = types.BuiltinPkg.Lookup("nil")
+ nnil := NodNil()
+ nnil.(*ir.NilExpr).SetSym(s)
+ s.Def = nnil
+
+ s = types.BuiltinPkg.Lookup("iota")
+ s.Def = ir.NewIota(base.Pos, s)
+
+ for et := types.TINT8; et <= types.TUINT64; et++ {
+ types.IsInt[et] = true
+ }
+ types.IsInt[types.TINT] = true
+ types.IsInt[types.TUINT] = true
+ types.IsInt[types.TUINTPTR] = true
+
+ types.IsFloat[types.TFLOAT32] = true
+ types.IsFloat[types.TFLOAT64] = true
+
+ types.IsComplex[types.TCOMPLEX64] = true
+ types.IsComplex[types.TCOMPLEX128] = true
+
+ // initialize okfor
+ for et := types.Kind(0); et < types.NTYPE; et++ {
+ if types.IsInt[et] || et == types.TIDEAL {
+ okforeq[et] = true
+ types.IsOrdered[et] = true
+ okforarith[et] = true
+ okforadd[et] = true
+ okforand[et] = true
+ ir.OKForConst[et] = true
+ types.IsSimple[et] = true
+ }
+
+ if types.IsFloat[et] {
+ okforeq[et] = true
+ types.IsOrdered[et] = true
+ okforadd[et] = true
+ okforarith[et] = true
+ ir.OKForConst[et] = true
+ types.IsSimple[et] = true
+ }
+
+ if types.IsComplex[et] {
+ okforeq[et] = true
+ okforadd[et] = true
+ okforarith[et] = true
+ ir.OKForConst[et] = true
+ types.IsSimple[et] = true
+ }
+ }
+
+ types.IsSimple[types.TBOOL] = true
+
+ okforadd[types.TSTRING] = true
+
+ okforbool[types.TBOOL] = true
+
+ okforcap[types.TARRAY] = true
+ okforcap[types.TCHAN] = true
+ okforcap[types.TSLICE] = true
+
+ ir.OKForConst[types.TBOOL] = true
+ ir.OKForConst[types.TSTRING] = true
+
+ okforlen[types.TARRAY] = true
+ okforlen[types.TCHAN] = true
+ okforlen[types.TMAP] = true
+ okforlen[types.TSLICE] = true
+ okforlen[types.TSTRING] = true
+
+ okforeq[types.TPTR] = true
+ okforeq[types.TUNSAFEPTR] = true
+ okforeq[types.TINTER] = true
+ okforeq[types.TCHAN] = true
+ okforeq[types.TSTRING] = true
+ okforeq[types.TBOOL] = true
+ okforeq[types.TMAP] = true // nil only; refined in typecheck
+ okforeq[types.TFUNC] = true // nil only; refined in typecheck
+ okforeq[types.TSLICE] = true // nil only; refined in typecheck
+ okforeq[types.TARRAY] = true // only if element type is comparable; refined in typecheck
+ okforeq[types.TSTRUCT] = true // only if all struct fields are comparable; refined in typecheck
+
+ types.IsOrdered[types.TSTRING] = true
+
+ for i := range okfor {
+ okfor[i] = okfornone[:]
+ }
+
+ // binary
+ okfor[ir.OADD] = okforadd[:]
+ okfor[ir.OAND] = okforand[:]
+ okfor[ir.OANDAND] = okforbool[:]
+ okfor[ir.OANDNOT] = okforand[:]
+ okfor[ir.ODIV] = okforarith[:]
+ okfor[ir.OEQ] = okforeq[:]
+ okfor[ir.OGE] = types.IsOrdered[:]
+ okfor[ir.OGT] = types.IsOrdered[:]
+ okfor[ir.OLE] = types.IsOrdered[:]
+ okfor[ir.OLT] = types.IsOrdered[:]
+ okfor[ir.OMOD] = okforand[:]
+ okfor[ir.OMUL] = okforarith[:]
+ okfor[ir.ONE] = okforeq[:]
+ okfor[ir.OOR] = okforand[:]
+ okfor[ir.OOROR] = okforbool[:]
+ okfor[ir.OSUB] = okforarith[:]
+ okfor[ir.OXOR] = okforand[:]
+ okfor[ir.OLSH] = okforand[:]
+ okfor[ir.ORSH] = okforand[:]
+
+ // unary
+ okfor[ir.OBITNOT] = okforand[:]
+ okfor[ir.ONEG] = okforarith[:]
+ okfor[ir.ONOT] = okforbool[:]
+ okfor[ir.OPLUS] = okforarith[:]
+
+ // special
+ okfor[ir.OCAP] = okforcap[:]
+ okfor[ir.OLEN] = okforlen[:]
+
+ // comparison
+ iscmp[ir.OLT] = true
+ iscmp[ir.OGT] = true
+ iscmp[ir.OGE] = true
+ iscmp[ir.OLE] = true
+ iscmp[ir.OEQ] = true
+ iscmp[ir.ONE] = true
+}
+
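+ // makeErrorInterface constructs the type interface{ Error() string }.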
+func makeErrorInterface() *types.Type {
+ sig := types.NewSignature(types.NoPkg, fakeRecvField(), nil, []*types.Field{
+ types.NewField(src.NoXPos, nil, types.Types[types.TSTRING]),
+ })
+ method := types.NewField(src.NoXPos, Lookup("Error"), sig)
+ return types.NewInterface(types.NoPkg, []*types.Field{method})
+}
+
+// declareUniverse makes the universe block visible within the current package.
+func declareUniverse() {
+ // Operationally, this is similar to a dot import of builtinpkg, except
+ // that we silently skip symbols that are already declared in the
+ // package block rather than emitting a redeclared symbol error.
+
+ for _, s := range types.BuiltinPkg.Syms {
+ if s.Def == nil {
+ continue
+ }
+ s1 := Lookup(s.Name)
+ if s1.Def != nil {
+ continue
+ }
+
+ s1.Def = s.Def
+ s1.Block = s.Block
+ }
+
+ ir.RegFP = NewName(Lookup(".fp"))
+ ir.RegFP.SetType(types.Types[types.TINT32])
+ ir.RegFP.Class_ = ir.PPARAM
+ ir.RegFP.SetUsed(true)
+}