author      Russ Cox <rsc@golang.org>    2020-12-23 00:41:49 -0500
committer   Russ Cox <rsc@golang.org>    2020-12-23 06:38:26 +0000
commit      b9693d7627089204e6c2448f543c3512d86dae70 (patch)
tree        0f869f5abb58568525f47d330e93ef36e1467bf2 /src/cmd/compile/internal/gc
parent      dac0de3748cc816352da56f516506f80c33db4a5 (diff)
[dev.regabi] cmd/compile: split out package typecheck [generated]
This commit splits the typechecking logic into its own package, the
first of a sequence of CLs to break package gc into more manageable
units.

[git-generate]

cd src/cmd/compile/internal/gc

rf '
    # The binary import/export has to be part of typechecking,
    # because we load inlined function bodies lazily, but "exporter"
    # should not be. Move that out of bexport.go.
    mv exporter exporter.markObject exporter.markType export.go

    # Use the typechecking helpers, so that the calls left behind
    # in package gc do not need access to ctxExpr etc.
    ex {
        import "cmd/compile/internal/ir"

        # TODO(rsc): Should not be necessary.
        avoid TypecheckExpr
        avoid TypecheckStmt
        avoid TypecheckExprs
        avoid TypecheckStmts
        avoid TypecheckAssignExpr
        avoid TypecheckCallee

        var n ir.Node
        var ns []ir.Node
        typecheck(n, ctxExpr) -> TypecheckExpr(n)
        typecheck(n, ctxStmt) -> TypecheckStmt(n)
        typecheckslice(ns, ctxExpr) -> TypecheckExprs(ns)
        typecheckslice(ns, ctxStmt) -> TypecheckStmts(ns)
        typecheck(n, ctxExpr|ctxAssign) -> TypecheckAssignExpr(n)
        typecheck(n, ctxExpr|ctxCallee) -> TypecheckCallee(n)
    }

    # Move some typechecking API to typecheck.
    mv syslook LookupRuntime
    mv substArgTypes SubstArgTypes
    mv LookupRuntime SubstArgTypes syms.go

    mv conv Conv
    mv convnop ConvNop
    mv Conv ConvNop typecheck.go

    mv colasdefn AssignDefn
    mv colasname assignableName

    mv Target target.go
    mv initname autoexport exportsym dcl.go
    mv exportsym Export

    # Export API to be called from outside typecheck.
    # The ones with "Typecheck" prefixes will be renamed later to drop the prefix.
    mv adddot AddImplicitDots
    mv assignconv AssignConv
    mv expandmeth CalcMethods
    mv capturevarscomplete CaptureVarsComplete
    mv checkMapKeys CheckMapKeys
    mv checkreturn CheckReturn
    mv dclcontext DeclContext
    mv dclfunc DeclFunc
    mv declare Declare
    mv dotImportRefs DotImportRefs
    mv declImporter DeclImporter
    mv variter DeclVars
    mv defaultlit DefaultLit
    mv evalConst EvalConst
    mv expandInline ImportBody
    mv finishUniverse declareUniverse
    mv funcbody FinishFuncBody
    mv funchdr StartFuncBody
    mv indexconst IndexConst
    mv initTodo InitTodoFunc
    mv lookup Lookup
    mv resolve Resolve
    mv lookupN LookupNum
    mv nodAddr NodAddr
    mv nodAddrAt NodAddrAt
    mv nodnil NodNil
    mv origBoolConst OrigBool
    mv origConst OrigConst
    mv origIntConst OrigInt
    mv redeclare Redeclared
    mv tostruct NewStructType
    mv functype NewFuncType
    mv methodfunc NewMethodType
    mv structargs NewFuncParams
    mv temp Temp
    mv tempAt TempAt
    mv typecheckok TypecheckAllowed
    mv typecheck _typecheck # make room for typecheck pkg
    mv typecheckinl TypecheckImportedBody
    mv typecheckFunc TypecheckFunc
    mv iimport ReadImports
    mv iexport WriteExports
    mv sysfunc LookupRuntimeFunc
    mv sysvar LookupRuntimeVar

    # Move function constructors to typecheck.
    mv mkdotargslice MakeDotArgs
    mv fixVariadicCall FixVariadicCall
    mv closureType ClosureType
    mv partialCallType PartialCallType
    mv capturevars CaptureVars
    mv MakeDotArgs FixVariadicCall ClosureType PartialCallType CaptureVars typecheckclosure func.go

    mv autolabel AutoLabel
    mv AutoLabel syms.go

    mv Dlist dlist
    mv Symlink symlink

    mv \
        AssignDefn assignableName \
        AssignConv \
        CaptureVarsComplete \
        DeclContext \
        DeclFunc \
        DeclImporter \
        DeclVars \
        Declare \
        DotImportRefs \
        Export \
        InitTodoFunc \
        Lookup \
        LookupNum \
        LookupRuntimeFunc \
        LookupRuntimeVar \
        NewFuncParams \
        NewName \
        NodAddr \
        NodAddrAt \
        NodNil \
        Redeclared \
        StartFuncBody \
        FinishFuncBody \
        TypecheckImportedBody \
        AddImplicitDots \
        CalcMethods \
        CheckFuncStack \
        NewFuncType \
        NewMethodType \
        NewStructType \
        TypecheckAllowed \
        Temp \
        TempAt \
        adddot1 \
        dotlist \
        addmethod \
        assignconvfn \
        assignop \
        autotmpname \
        autoexport \
        bexport.go \
        checkdupfields \
        checkembeddedtype \
        closurename \
        convertop \
        declare_typegen \
        decldepth \
        dlist \
        dotpath \
        expand0 \
        expand1 \
        expandDecl \
        fakeRecvField \
        fnpkg \
        funcStack \
        funcStackEnt \
        funcarg \
        funcarg2 \
        funcargs \
        funcargs2 \
        globClosgen \
        ifacelookdot \
        implements \
        importalias \
        importconst \
        importfunc \
        importobj \
        importsym \
        importtype \
        importvar \
        inimport \
        initname \
        isptrto \
        loadsys \
        lookdot0 \
        lookdot1 \
        makepartialcall \
        okfor \
        okforlen \
        operandType \
        slist \
        symlink \
        tointerface \
        typeSet \
        typeSet.add \
        typeSetEntry \
        typecheckExprSwitch \
        typecheckTypeSwitch \
        typecheckpartialcall \
        typecheckrange \
        typecheckrangeExpr \
        typecheckselect \
        typecheckswitch \
        vargen \
        builtin.go \
        builtin_test.go \
        const.go \
        func.go \
        iexport.go \
        iimport.go \
        mapfile_mmap.go \
        syms.go \
        target.go \
        typecheck.go \
        unsafe.go \
        universe.go \
        cmd/compile/internal/typecheck
'

rm gen.go types.go types_acc.go
sed -i '' 's/package gc/package typecheck/' mapfile_read.go mkbuiltin.go
mv mapfile_read.go ../typecheck # not part of default build
mv mkbuiltin.go ../typecheck # package main helper
mv builtin ../typecheck

cd ../typecheck
mv dcl.go dcl1.go
mv typecheck.go typecheck1.go
mv universe.go universe1.go

rf '
    # Sweep some small files into larger ones.
    # "mv sym... file1.go file.go" (after the mv file1.go file.go above)
    # lets us insert sym... at the top of file.go.
    mv okfor okforeq universe1.go universe.go
    mv DeclContext vargen dcl1.go Temp TempAt autotmpname NewMethodType dcl.go
    mv InitTodoFunc inimport decldepth TypecheckAllowed typecheck1.go typecheck.go
    mv inl.go closure.go func.go
    mv range.go select.go swt.go stmt.go
    mv Lookup loadsys LookupRuntimeFunc LookupRuntimeVar syms.go
    mv unsafe.go const.go

    mv TypecheckAssignExpr AssignExpr
    mv TypecheckExpr Expr
    mv TypecheckStmt Stmt
    mv TypecheckExprs Exprs
    mv TypecheckStmts Stmts
    mv TypecheckCall Call
    mv TypecheckCallee Callee
    mv _typecheck check
    mv TypecheckFunc Func
    mv TypecheckFuncBody FuncBody
    mv TypecheckImports AllImportedBodies
    mv TypecheckImportedBody ImportedBody
    mv TypecheckInit Init
    mv TypecheckPackage Package
'

rm gen.go go.go init.go main.go reflect.go

Change-Id: Iea6a7aaf6407d690670ec58aeb36cc0b280f80b0
Reviewed-on: https://go-review.googlesource.com/c/go/+/279236
Trust: Russ Cox <rsc@golang.org>
Run-TryBot: Russ Cox <rsc@golang.org>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
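To make the mechanical rewrite concrete, here is a minimal sketch of what the ex-patterns above (plus the later Typecheck* renames) do to a typical call site inside package gc. It assumes the post-split API visible in this diff (typecheck.Expr, typecheck.Stmts); rewriteExample and its arguments are hypothetical, and the snippet only builds inside the compiler tree at this commit:

    package gc

    import (
    	"cmd/compile/internal/ir"
    	"cmd/compile/internal/typecheck"
    )

    // rewriteExample is an illustrative helper, not part of this CL.
    func rewriteExample(n ir.Node, body []ir.Node) ir.Node {
    	// Before this CL: n = typecheck(n, ctxExpr)
    	n = typecheck.Expr(n)
    	// Before this CL: typecheckslice(body, ctxStmt)
    	typecheck.Stmts(body)
    	return n
    }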
Diffstat (limited to 'src/cmd/compile/internal/gc')
-rw-r--r--  src/cmd/compile/internal/gc/abiutils_test.go | 3
-rw-r--r--  src/cmd/compile/internal/gc/abiutilsaux_test.go | 5
-rw-r--r--  src/cmd/compile/internal/gc/alg.go | 93
-rw-r--r--  src/cmd/compile/internal/gc/bexport.go | 185
-rw-r--r--  src/cmd/compile/internal/gc/builtin.go | 344
-rw-r--r--  src/cmd/compile/internal/gc/builtin/runtime.go | 259
-rw-r--r--  src/cmd/compile/internal/gc/builtin_test.go | 33
-rw-r--r--  src/cmd/compile/internal/gc/closure.go | 310
-rw-r--r--  src/cmd/compile/internal/gc/const.go | 864
-rw-r--r--  src/cmd/compile/internal/gc/dcl.go | 580
-rw-r--r--  src/cmd/compile/internal/gc/embed.go | 7
-rw-r--r--  src/cmd/compile/internal/gc/escape.go | 13
-rw-r--r--  src/cmd/compile/internal/gc/export.go | 191
-rw-r--r--  src/cmd/compile/internal/gc/gen.go | 76
-rw-r--r--  src/cmd/compile/internal/gc/go.go | 25
-rw-r--r--  src/cmd/compile/internal/gc/gsubr.go | 21
-rw-r--r--  src/cmd/compile/internal/gc/iexport.go | 1613
-rw-r--r--  src/cmd/compile/internal/gc/iimport.go | 1141
-rw-r--r--  src/cmd/compile/internal/gc/init.go | 41
-rw-r--r--  src/cmd/compile/internal/gc/inl.go | 110
-rw-r--r--  src/cmd/compile/internal/gc/main.go | 61
-rw-r--r--  src/cmd/compile/internal/gc/mapfile_mmap.go | 48
-rw-r--r--  src/cmd/compile/internal/gc/mapfile_read.go | 21
-rw-r--r--  src/cmd/compile/internal/gc/mkbuiltin.go | 228
-rw-r--r--  src/cmd/compile/internal/gc/noder.go | 41
-rw-r--r--  src/cmd/compile/internal/gc/obj.go | 31
-rw-r--r--  src/cmd/compile/internal/gc/order.go | 61
-rw-r--r--  src/cmd/compile/internal/gc/pgen.go | 7
-rw-r--r--  src/cmd/compile/internal/gc/pgen_test.go | 5
-rw-r--r--  src/cmd/compile/internal/gc/range.go | 198
-rw-r--r--  src/cmd/compile/internal/gc/reflect.go | 53
-rw-r--r--  src/cmd/compile/internal/gc/select.go | 144
-rw-r--r--  src/cmd/compile/internal/gc/sinit.go | 45
-rw-r--r--  src/cmd/compile/internal/gc/ssa.go | 275
-rw-r--r--  src/cmd/compile/internal/gc/subr.go | 866
-rw-r--r--  src/cmd/compile/internal/gc/swt.go | 244
-rw-r--r--  src/cmd/compile/internal/gc/typecheck.go | 4147
-rw-r--r--  src/cmd/compile/internal/gc/types.go | 5
-rw-r--r--  src/cmd/compile/internal/gc/types_acc.go | 8
-rw-r--r--  src/cmd/compile/internal/gc/universe.go | 347
-rw-r--r--  src/cmd/compile/internal/gc/unsafe.go | 90
-rw-r--r--  src/cmd/compile/internal/gc/walk.go | 543
42 files changed, 827 insertions, 12555 deletions
diff --git a/src/cmd/compile/internal/gc/abiutils_test.go b/src/cmd/compile/internal/gc/abiutils_test.go
index 5a88332de8..fe9a838688 100644
--- a/src/cmd/compile/internal/gc/abiutils_test.go
+++ b/src/cmd/compile/internal/gc/abiutils_test.go
@@ -7,6 +7,7 @@ package gc
import (
"bufio"
"cmd/compile/internal/base"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/obj/x86"
@@ -42,7 +43,7 @@ func TestMain(m *testing.M) {
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
return typenamesym(t).Linksym()
}
- TypecheckInit()
+ typecheck.Init()
os.Exit(m.Run())
}
diff --git a/src/cmd/compile/internal/gc/abiutilsaux_test.go b/src/cmd/compile/internal/gc/abiutilsaux_test.go
index 8585ab9a30..e6590beac0 100644
--- a/src/cmd/compile/internal/gc/abiutilsaux_test.go
+++ b/src/cmd/compile/internal/gc/abiutilsaux_test.go
@@ -9,6 +9,7 @@ package gc
import (
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
@@ -19,7 +20,7 @@ import (
func mkParamResultField(t *types.Type, s *types.Sym, which ir.Class) *types.Field {
field := types.NewField(src.NoXPos, s, t)
- n := NewName(s)
+ n := typecheck.NewName(s)
n.Class_ = which
field.Nname = n
n.SetType(t)
@@ -42,7 +43,7 @@ func mkstruct(fieldtypes []*types.Type) *types.Type {
}
func mkFuncType(rcvr *types.Type, ins []*types.Type, outs []*types.Type) *types.Type {
- q := lookup("?")
+ q := typecheck.Lookup("?")
inf := []*types.Field{}
for _, it := range ins {
inf = append(inf, mkParamResultField(it, q, ir.PPARAM))
diff --git a/src/cmd/compile/internal/gc/alg.go b/src/cmd/compile/internal/gc/alg.go
index dab27b4929..b0d46eab2f 100644
--- a/src/cmd/compile/internal/gc/alg.go
+++ b/src/cmd/compile/internal/gc/alg.go
@@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"fmt"
@@ -106,7 +107,7 @@ func genhash(t *types.Type) *obj.LSym {
return closure
}
if memhashvarlen == nil {
- memhashvarlen = sysfunc("memhash_varlen")
+ memhashvarlen = typecheck.LookupRuntimeFunc("memhash_varlen")
}
ot := 0
ot = dsymptr(closure, ot, memhashvarlen, 0)
@@ -143,17 +144,17 @@ func genhash(t *types.Type) *obj.LSym {
}
base.Pos = base.AutogeneratedPos // less confusing than end of input
- dclcontext = ir.PEXTERN
+ typecheck.DeclContext = ir.PEXTERN
// func sym(p *T, h uintptr) uintptr
args := []*ir.Field{
- ir.NewField(base.Pos, lookup("p"), nil, types.NewPtr(t)),
- ir.NewField(base.Pos, lookup("h"), nil, types.Types[types.TUINTPTR]),
+ ir.NewField(base.Pos, typecheck.Lookup("p"), nil, types.NewPtr(t)),
+ ir.NewField(base.Pos, typecheck.Lookup("h"), nil, types.Types[types.TUINTPTR]),
}
results := []*ir.Field{ir.NewField(base.Pos, nil, nil, types.Types[types.TUINTPTR])}
tfn := ir.NewFuncType(base.Pos, nil, args, results)
- fn := dclfunc(sym, tfn)
+ fn := typecheck.DeclFunc(sym, tfn)
np := ir.AsNode(tfn.Type().Params().Field(0).Nname)
nh := ir.AsNode(tfn.Type().Params().Field(1).Nname)
@@ -165,7 +166,7 @@ func genhash(t *types.Type) *obj.LSym {
hashel := hashfor(t.Elem())
// for i := 0; i < nelem; i++
- ni := temp(types.Types[types.TINT])
+ ni := typecheck.Temp(types.Types[types.TINT])
init := ir.NewAssignStmt(base.Pos, ni, ir.NewInt(0))
cond := ir.NewBinaryExpr(base.Pos, ir.OLT, ni, ir.NewInt(t.NumElem()))
post := ir.NewAssignStmt(base.Pos, ni, ir.NewBinaryExpr(base.Pos, ir.OADD, ni, ir.NewInt(1)))
@@ -177,7 +178,7 @@ func genhash(t *types.Type) *obj.LSym {
nx := ir.NewIndexExpr(base.Pos, np, ni)
nx.SetBounded(true)
- na := nodAddr(nx)
+ na := typecheck.NodAddr(nx)
call.Args.Append(na)
call.Args.Append(nh)
loop.Body.Append(ir.NewAssignStmt(base.Pos, nh, call))
@@ -201,7 +202,7 @@ func genhash(t *types.Type) *obj.LSym {
hashel := hashfor(f.Type)
call := ir.NewCallExpr(base.Pos, ir.OCALL, hashel, nil)
nx := ir.NewSelectorExpr(base.Pos, ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
- na := nodAddr(nx)
+ na := typecheck.NodAddr(nx)
call.Args.Append(na)
call.Args.Append(nh)
fn.Body.Append(ir.NewAssignStmt(base.Pos, nh, call))
@@ -216,7 +217,7 @@ func genhash(t *types.Type) *obj.LSym {
hashel := hashmem(f.Type)
call := ir.NewCallExpr(base.Pos, ir.OCALL, hashel, nil)
nx := ir.NewSelectorExpr(base.Pos, ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
- na := nodAddr(nx)
+ na := typecheck.NodAddr(nx)
call.Args.Append(na)
call.Args.Append(nh)
call.Args.Append(ir.NewInt(size))
@@ -234,13 +235,13 @@ func genhash(t *types.Type) *obj.LSym {
ir.DumpList("genhash body", fn.Body)
}
- funcbody()
+ typecheck.FinishFuncBody()
fn.SetDupok(true)
- typecheckFunc(fn)
+ typecheck.Func(fn)
ir.CurFunc = fn
- typecheckslice(fn.Body, ctxStmt)
+ typecheck.Stmts(fn.Body)
ir.CurFunc = nil
if base.Debug.DclStack != 0 {
@@ -248,7 +249,7 @@ func genhash(t *types.Type) *obj.LSym {
}
fn.SetNilCheckDisabled(true)
- Target.Decls = append(Target.Decls, fn)
+ typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
// Build closure. It doesn't close over any variables, so
// it contains just the function pointer.
@@ -284,9 +285,9 @@ func hashfor(t *types.Type) ir.Node {
sym = typesymprefix(".hash", t)
}
- n := NewName(sym)
+ n := typecheck.NewName(sym)
ir.MarkFunc(n)
- n.SetType(functype(nil, []*ir.Field{
+ n.SetType(typecheck.NewFuncType(nil, []*ir.Field{
ir.NewField(base.Pos, nil, nil, types.NewPtr(t)),
ir.NewField(base.Pos, nil, nil, types.Types[types.TUINTPTR]),
}, []*ir.Field{
@@ -298,9 +299,9 @@ func hashfor(t *types.Type) ir.Node {
// sysClosure returns a closure which will call the
// given runtime function (with no closed-over variables).
func sysClosure(name string) *obj.LSym {
- s := sysvar(name + "·f")
+ s := typecheck.LookupRuntimeVar(name + "·f")
if len(s.P) == 0 {
- f := sysfunc(name)
+ f := typecheck.LookupRuntimeFunc(name)
dsymptr(s, 0, f, 0)
ggloblsym(s, int32(types.PtrSize), obj.DUPOK|obj.RODATA)
}
@@ -349,7 +350,7 @@ func geneq(t *types.Type) *obj.LSym {
return closure
}
if memequalvarlen == nil {
- memequalvarlen = sysvar("memequal_varlen") // asm func
+ memequalvarlen = typecheck.LookupRuntimeVar("memequal_varlen") // asm func
}
ot := 0
ot = dsymptr(closure, ot, memequalvarlen, 0)
@@ -372,20 +373,20 @@ func geneq(t *types.Type) *obj.LSym {
// Autogenerate code for equality of structs and arrays.
base.Pos = base.AutogeneratedPos // less confusing than end of input
- dclcontext = ir.PEXTERN
+ typecheck.DeclContext = ir.PEXTERN
// func sym(p, q *T) bool
tfn := ir.NewFuncType(base.Pos, nil,
- []*ir.Field{ir.NewField(base.Pos, lookup("p"), nil, types.NewPtr(t)), ir.NewField(base.Pos, lookup("q"), nil, types.NewPtr(t))},
- []*ir.Field{ir.NewField(base.Pos, lookup("r"), nil, types.Types[types.TBOOL])})
+ []*ir.Field{ir.NewField(base.Pos, typecheck.Lookup("p"), nil, types.NewPtr(t)), ir.NewField(base.Pos, typecheck.Lookup("q"), nil, types.NewPtr(t))},
+ []*ir.Field{ir.NewField(base.Pos, typecheck.Lookup("r"), nil, types.Types[types.TBOOL])})
- fn := dclfunc(sym, tfn)
+ fn := typecheck.DeclFunc(sym, tfn)
np := ir.AsNode(tfn.Type().Params().Field(0).Nname)
nq := ir.AsNode(tfn.Type().Params().Field(1).Nname)
nr := ir.AsNode(tfn.Type().Results().Field(0).Nname)
// Label to jump to if an equality test fails.
- neq := autolabel(".neq")
+ neq := typecheck.AutoLabel(".neq")
// We reach here only for types that have equality but
// cannot be handled by the standard algorithms,
@@ -450,7 +451,7 @@ func geneq(t *types.Type) *obj.LSym {
} else {
// Generate a for loop.
// for i := 0; i < nelem; i++
- i := temp(types.Types[types.TINT])
+ i := typecheck.Temp(types.Types[types.TINT])
init := ir.NewAssignStmt(base.Pos, i, ir.NewInt(0))
cond := ir.NewBinaryExpr(base.Pos, ir.OLT, i, ir.NewInt(nelem))
post := ir.NewAssignStmt(base.Pos, i, ir.NewBinaryExpr(base.Pos, ir.OADD, i, ir.NewInt(1)))
@@ -586,7 +587,7 @@ func geneq(t *types.Type) *obj.LSym {
// ret:
// return
- ret := autolabel(".ret")
+ ret := typecheck.AutoLabel(".ret")
fn.Body.Append(ir.NewLabelStmt(base.Pos, ret))
fn.Body.Append(ir.NewReturnStmt(base.Pos, nil))
@@ -610,13 +611,13 @@ func geneq(t *types.Type) *obj.LSym {
ir.DumpList("geneq body", fn.Body)
}
- funcbody()
+ typecheck.FinishFuncBody()
fn.SetDupok(true)
- typecheckFunc(fn)
+ typecheck.Func(fn)
ir.CurFunc = fn
- typecheckslice(fn.Body, ctxStmt)
+ typecheck.Stmts(fn.Body)
ir.CurFunc = nil
if base.Debug.DclStack != 0 {
@@ -628,7 +629,7 @@ func geneq(t *types.Type) *obj.LSym {
// neither of which can be nil, and our comparisons
// are shallow.
fn.SetNilCheckDisabled(true)
- Target.Decls = append(Target.Decls, fn)
+ typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
// Generate a closure which points at the function we just generated.
dsymptr(closure, 0, sym.Linksym(), 0)
@@ -660,20 +661,20 @@ func eqfield(p ir.Node, q ir.Node, field *types.Sym) ir.Node {
// which can be used to construct string equality comparison.
// eqlen must be evaluated before eqmem, and shortcircuiting is required.
func eqstring(s, t ir.Node) (eqlen *ir.BinaryExpr, eqmem *ir.CallExpr) {
- s = conv(s, types.Types[types.TSTRING])
- t = conv(t, types.Types[types.TSTRING])
+ s = typecheck.Conv(s, types.Types[types.TSTRING])
+ t = typecheck.Conv(t, types.Types[types.TSTRING])
sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
tptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, t)
- slen := conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, s), types.Types[types.TUINTPTR])
- tlen := conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, t), types.Types[types.TUINTPTR])
+ slen := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, s), types.Types[types.TUINTPTR])
+ tlen := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, t), types.Types[types.TUINTPTR])
- fn := syslook("memequal")
- fn = substArgTypes(fn, types.Types[types.TUINT8], types.Types[types.TUINT8])
+ fn := typecheck.LookupRuntime("memequal")
+ fn = typecheck.SubstArgTypes(fn, types.Types[types.TUINT8], types.Types[types.TUINT8])
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, []ir.Node{sptr, tptr, ir.Copy(slen)})
- TypecheckCall(call)
+ typecheck.Call(call)
cmp := ir.NewBinaryExpr(base.Pos, ir.OEQ, slen, tlen)
- cmp = typecheck(cmp, ctxExpr).(*ir.BinaryExpr)
+ cmp = typecheck.Expr(cmp).(*ir.BinaryExpr)
cmp.SetType(types.Types[types.TBOOL])
return cmp, call
}
@@ -692,9 +693,9 @@ func eqinterface(s, t ir.Node) (eqtab *ir.BinaryExpr, eqdata *ir.CallExpr) {
// func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool)
var fn ir.Node
if s.Type().IsEmptyInterface() {
- fn = syslook("efaceeq")
+ fn = typecheck.LookupRuntime("efaceeq")
} else {
- fn = syslook("ifaceeq")
+ fn = typecheck.LookupRuntime("ifaceeq")
}
stab := ir.NewUnaryExpr(base.Pos, ir.OITAB, s)
@@ -707,10 +708,10 @@ func eqinterface(s, t ir.Node) (eqtab *ir.BinaryExpr, eqdata *ir.CallExpr) {
tdata.SetTypecheck(1)
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, []ir.Node{stab, sdata, tdata})
- TypecheckCall(call)
+ typecheck.Call(call)
cmp := ir.NewBinaryExpr(base.Pos, ir.OEQ, stab, ttab)
- cmp = typecheck(cmp, ctxExpr).(*ir.BinaryExpr)
+ cmp = typecheck.Expr(cmp).(*ir.BinaryExpr)
cmp.SetType(types.Types[types.TBOOL])
return cmp, call
}
@@ -718,8 +719,8 @@ func eqinterface(s, t ir.Node) (eqtab *ir.BinaryExpr, eqdata *ir.CallExpr) {
// eqmem returns the node
// memequal(&p.field, &q.field [, size])
func eqmem(p ir.Node, q ir.Node, field *types.Sym, size int64) ir.Node {
- nx := typecheck(nodAddr(ir.NewSelectorExpr(base.Pos, ir.OXDOT, p, field)), ctxExpr)
- ny := typecheck(nodAddr(ir.NewSelectorExpr(base.Pos, ir.OXDOT, q, field)), ctxExpr)
+ nx := typecheck.Expr(typecheck.NodAddr(ir.NewSelectorExpr(base.Pos, ir.OXDOT, p, field)))
+ ny := typecheck.Expr(typecheck.NodAddr(ir.NewSelectorExpr(base.Pos, ir.OXDOT, q, field)))
fn, needsize := eqmemfunc(size, nx.Type().Elem())
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
@@ -735,14 +736,14 @@ func eqmem(p ir.Node, q ir.Node, field *types.Sym, size int64) ir.Node {
func eqmemfunc(size int64, t *types.Type) (fn *ir.Name, needsize bool) {
switch size {
default:
- fn = syslook("memequal")
+ fn = typecheck.LookupRuntime("memequal")
needsize = true
case 1, 2, 4, 8, 16:
buf := fmt.Sprintf("memequal%d", int(size)*8)
- fn = syslook(buf)
+ fn = typecheck.LookupRuntime(buf)
}
- fn = substArgTypes(fn, t, t)
+ fn = typecheck.SubstArgTypes(fn, t, t)
return fn, needsize
}
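The alg.go hunks above all follow one shape: an autogenerated function is declared, built, and typechecked through the new typecheck package instead of package-local helpers. A minimal sketch of that sequence, assuming the post-split API shown in this diff (DeclContext, DeclFunc, FinishFuncBody, Func, Stmts, Target.Decls); genStub and its empty body are hypothetical, and the code only makes sense inside cmd/compile at this commit:

    package gc

    import (
    	"cmd/compile/internal/base"
    	"cmd/compile/internal/ir"
    	"cmd/compile/internal/typecheck"
    	"cmd/compile/internal/types"
    )

    // genStub mirrors the declare/build/typecheck sequence used by genhash and geneq.
    func genStub(sym *types.Sym) {
    	typecheck.DeclContext = ir.PEXTERN // autogenerated funcs live at package scope

    	// func sym() (r bool)
    	results := []*ir.Field{ir.NewField(base.Pos, typecheck.Lookup("r"), nil, types.Types[types.TBOOL])}
    	tfn := ir.NewFuncType(base.Pos, nil, nil, results)
    	fn := typecheck.DeclFunc(sym, tfn)

    	// ... append statements to fn.Body here ...

    	typecheck.FinishFuncBody() // close the function's declaration scope
    	typecheck.Func(fn)         // typecheck the signature
    	ir.CurFunc = fn
    	typecheck.Stmts(fn.Body) // typecheck the body statements
    	ir.CurFunc = nil

    	typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
    }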
diff --git a/src/cmd/compile/internal/gc/bexport.go b/src/cmd/compile/internal/gc/bexport.go
deleted file mode 100644
index 3c377d8ba3..0000000000
--- a/src/cmd/compile/internal/gc/bexport.go
+++ /dev/null
@@ -1,185 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package gc
-
-import (
- "cmd/compile/internal/ir"
- "cmd/compile/internal/types"
-)
-
-type exporter struct {
- marked map[*types.Type]bool // types already seen by markType
-}
-
-// markObject visits a reachable object.
-func (p *exporter) markObject(n ir.Node) {
- if n.Op() == ir.ONAME {
- n := n.(*ir.Name)
- if n.Class_ == ir.PFUNC {
- inlFlood(n, exportsym)
- }
- }
-
- p.markType(n.Type())
-}
-
-// markType recursively visits types reachable from t to identify
-// functions whose inline bodies may be needed.
-func (p *exporter) markType(t *types.Type) {
- if p.marked[t] {
- return
- }
- p.marked[t] = true
-
- // If this is a named type, mark all of its associated
- // methods. Skip interface types because t.Methods contains
- // only their unexpanded method set (i.e., exclusive of
- // interface embeddings), and the switch statement below
- // handles their full method set.
- if t.Sym() != nil && t.Kind() != types.TINTER {
- for _, m := range t.Methods().Slice() {
- if types.IsExported(m.Sym.Name) {
- p.markObject(ir.AsNode(m.Nname))
- }
- }
- }
-
- // Recursively mark any types that can be produced given a
- // value of type t: dereferencing a pointer; indexing or
- // iterating over an array, slice, or map; receiving from a
- // channel; accessing a struct field or interface method; or
- // calling a function.
- //
- // Notably, we don't mark function parameter types, because
- // the user already needs some way to construct values of
- // those types.
- switch t.Kind() {
- case types.TPTR, types.TARRAY, types.TSLICE:
- p.markType(t.Elem())
-
- case types.TCHAN:
- if t.ChanDir().CanRecv() {
- p.markType(t.Elem())
- }
-
- case types.TMAP:
- p.markType(t.Key())
- p.markType(t.Elem())
-
- case types.TSTRUCT:
- for _, f := range t.FieldSlice() {
- if types.IsExported(f.Sym.Name) || f.Embedded != 0 {
- p.markType(f.Type)
- }
- }
-
- case types.TFUNC:
- for _, f := range t.Results().FieldSlice() {
- p.markType(f.Type)
- }
-
- case types.TINTER:
- for _, f := range t.FieldSlice() {
- if types.IsExported(f.Sym.Name) {
- p.markType(f.Type)
- }
- }
- }
-}
-
-// ----------------------------------------------------------------------------
-// Export format
-
-// Tags. Must be < 0.
-const (
- // Objects
- packageTag = -(iota + 1)
- constTag
- typeTag
- varTag
- funcTag
- endTag
-
- // Types
- namedTag
- arrayTag
- sliceTag
- dddTag
- structTag
- pointerTag
- signatureTag
- interfaceTag
- mapTag
- chanTag
-
- // Values
- falseTag
- trueTag
- int64Tag
- floatTag
- fractionTag // not used by gc
- complexTag
- stringTag
- nilTag
- unknownTag // not used by gc (only appears in packages with errors)
-
- // Type aliases
- aliasTag
-)
-
-var predecl []*types.Type // initialized lazily
-
-func predeclared() []*types.Type {
- if predecl == nil {
- // initialize lazily to be sure that all
- // elements have been initialized before
- predecl = []*types.Type{
- // basic types
- types.Types[types.TBOOL],
- types.Types[types.TINT],
- types.Types[types.TINT8],
- types.Types[types.TINT16],
- types.Types[types.TINT32],
- types.Types[types.TINT64],
- types.Types[types.TUINT],
- types.Types[types.TUINT8],
- types.Types[types.TUINT16],
- types.Types[types.TUINT32],
- types.Types[types.TUINT64],
- types.Types[types.TUINTPTR],
- types.Types[types.TFLOAT32],
- types.Types[types.TFLOAT64],
- types.Types[types.TCOMPLEX64],
- types.Types[types.TCOMPLEX128],
- types.Types[types.TSTRING],
-
- // basic type aliases
- types.ByteType,
- types.RuneType,
-
- // error
- types.ErrorType,
-
- // untyped types
- types.UntypedBool,
- types.UntypedInt,
- types.UntypedRune,
- types.UntypedFloat,
- types.UntypedComplex,
- types.UntypedString,
- types.Types[types.TNIL],
-
- // package unsafe
- types.Types[types.TUNSAFEPTR],
-
- // invalid type (package contains errors)
- types.Types[types.Txxx],
-
- // any type, for builtin export data
- types.Types[types.TANY],
- }
- }
- return predecl
-}
diff --git a/src/cmd/compile/internal/gc/builtin.go b/src/cmd/compile/internal/gc/builtin.go
deleted file mode 100644
index 12c70fb6d4..0000000000
--- a/src/cmd/compile/internal/gc/builtin.go
+++ /dev/null
@@ -1,344 +0,0 @@
-// Code generated by mkbuiltin.go. DO NOT EDIT.
-
-package gc
-
-import (
- "cmd/compile/internal/base"
- "cmd/compile/internal/ir"
- "cmd/compile/internal/types"
-)
-
-var runtimeDecls = [...]struct {
- name string
- tag int
- typ int
-}{
- {"newobject", funcTag, 4},
- {"mallocgc", funcTag, 8},
- {"panicdivide", funcTag, 9},
- {"panicshift", funcTag, 9},
- {"panicmakeslicelen", funcTag, 9},
- {"panicmakeslicecap", funcTag, 9},
- {"throwinit", funcTag, 9},
- {"panicwrap", funcTag, 9},
- {"gopanic", funcTag, 11},
- {"gorecover", funcTag, 14},
- {"goschedguarded", funcTag, 9},
- {"goPanicIndex", funcTag, 16},
- {"goPanicIndexU", funcTag, 18},
- {"goPanicSliceAlen", funcTag, 16},
- {"goPanicSliceAlenU", funcTag, 18},
- {"goPanicSliceAcap", funcTag, 16},
- {"goPanicSliceAcapU", funcTag, 18},
- {"goPanicSliceB", funcTag, 16},
- {"goPanicSliceBU", funcTag, 18},
- {"goPanicSlice3Alen", funcTag, 16},
- {"goPanicSlice3AlenU", funcTag, 18},
- {"goPanicSlice3Acap", funcTag, 16},
- {"goPanicSlice3AcapU", funcTag, 18},
- {"goPanicSlice3B", funcTag, 16},
- {"goPanicSlice3BU", funcTag, 18},
- {"goPanicSlice3C", funcTag, 16},
- {"goPanicSlice3CU", funcTag, 18},
- {"printbool", funcTag, 19},
- {"printfloat", funcTag, 21},
- {"printint", funcTag, 23},
- {"printhex", funcTag, 25},
- {"printuint", funcTag, 25},
- {"printcomplex", funcTag, 27},
- {"printstring", funcTag, 29},
- {"printpointer", funcTag, 30},
- {"printuintptr", funcTag, 31},
- {"printiface", funcTag, 30},
- {"printeface", funcTag, 30},
- {"printslice", funcTag, 30},
- {"printnl", funcTag, 9},
- {"printsp", funcTag, 9},
- {"printlock", funcTag, 9},
- {"printunlock", funcTag, 9},
- {"concatstring2", funcTag, 34},
- {"concatstring3", funcTag, 35},
- {"concatstring4", funcTag, 36},
- {"concatstring5", funcTag, 37},
- {"concatstrings", funcTag, 39},
- {"cmpstring", funcTag, 40},
- {"intstring", funcTag, 43},
- {"slicebytetostring", funcTag, 44},
- {"slicebytetostringtmp", funcTag, 45},
- {"slicerunetostring", funcTag, 48},
- {"stringtoslicebyte", funcTag, 50},
- {"stringtoslicerune", funcTag, 53},
- {"slicecopy", funcTag, 54},
- {"decoderune", funcTag, 55},
- {"countrunes", funcTag, 56},
- {"convI2I", funcTag, 57},
- {"convT16", funcTag, 58},
- {"convT32", funcTag, 58},
- {"convT64", funcTag, 58},
- {"convTstring", funcTag, 58},
- {"convTslice", funcTag, 58},
- {"convT2E", funcTag, 59},
- {"convT2Enoptr", funcTag, 59},
- {"convT2I", funcTag, 59},
- {"convT2Inoptr", funcTag, 59},
- {"assertE2I", funcTag, 57},
- {"assertE2I2", funcTag, 60},
- {"assertI2I", funcTag, 57},
- {"assertI2I2", funcTag, 60},
- {"panicdottypeE", funcTag, 61},
- {"panicdottypeI", funcTag, 61},
- {"panicnildottype", funcTag, 62},
- {"ifaceeq", funcTag, 64},
- {"efaceeq", funcTag, 64},
- {"fastrand", funcTag, 66},
- {"makemap64", funcTag, 68},
- {"makemap", funcTag, 69},
- {"makemap_small", funcTag, 70},
- {"mapaccess1", funcTag, 71},
- {"mapaccess1_fast32", funcTag, 72},
- {"mapaccess1_fast64", funcTag, 72},
- {"mapaccess1_faststr", funcTag, 72},
- {"mapaccess1_fat", funcTag, 73},
- {"mapaccess2", funcTag, 74},
- {"mapaccess2_fast32", funcTag, 75},
- {"mapaccess2_fast64", funcTag, 75},
- {"mapaccess2_faststr", funcTag, 75},
- {"mapaccess2_fat", funcTag, 76},
- {"mapassign", funcTag, 71},
- {"mapassign_fast32", funcTag, 72},
- {"mapassign_fast32ptr", funcTag, 72},
- {"mapassign_fast64", funcTag, 72},
- {"mapassign_fast64ptr", funcTag, 72},
- {"mapassign_faststr", funcTag, 72},
- {"mapiterinit", funcTag, 77},
- {"mapdelete", funcTag, 77},
- {"mapdelete_fast32", funcTag, 78},
- {"mapdelete_fast64", funcTag, 78},
- {"mapdelete_faststr", funcTag, 78},
- {"mapiternext", funcTag, 79},
- {"mapclear", funcTag, 80},
- {"makechan64", funcTag, 82},
- {"makechan", funcTag, 83},
- {"chanrecv1", funcTag, 85},
- {"chanrecv2", funcTag, 86},
- {"chansend1", funcTag, 88},
- {"closechan", funcTag, 30},
- {"writeBarrier", varTag, 90},
- {"typedmemmove", funcTag, 91},
- {"typedmemclr", funcTag, 92},
- {"typedslicecopy", funcTag, 93},
- {"selectnbsend", funcTag, 94},
- {"selectnbrecv", funcTag, 95},
- {"selectnbrecv2", funcTag, 97},
- {"selectsetpc", funcTag, 98},
- {"selectgo", funcTag, 99},
- {"block", funcTag, 9},
- {"makeslice", funcTag, 100},
- {"makeslice64", funcTag, 101},
- {"makeslicecopy", funcTag, 102},
- {"growslice", funcTag, 104},
- {"memmove", funcTag, 105},
- {"memclrNoHeapPointers", funcTag, 106},
- {"memclrHasPointers", funcTag, 106},
- {"memequal", funcTag, 107},
- {"memequal0", funcTag, 108},
- {"memequal8", funcTag, 108},
- {"memequal16", funcTag, 108},
- {"memequal32", funcTag, 108},
- {"memequal64", funcTag, 108},
- {"memequal128", funcTag, 108},
- {"f32equal", funcTag, 109},
- {"f64equal", funcTag, 109},
- {"c64equal", funcTag, 109},
- {"c128equal", funcTag, 109},
- {"strequal", funcTag, 109},
- {"interequal", funcTag, 109},
- {"nilinterequal", funcTag, 109},
- {"memhash", funcTag, 110},
- {"memhash0", funcTag, 111},
- {"memhash8", funcTag, 111},
- {"memhash16", funcTag, 111},
- {"memhash32", funcTag, 111},
- {"memhash64", funcTag, 111},
- {"memhash128", funcTag, 111},
- {"f32hash", funcTag, 111},
- {"f64hash", funcTag, 111},
- {"c64hash", funcTag, 111},
- {"c128hash", funcTag, 111},
- {"strhash", funcTag, 111},
- {"interhash", funcTag, 111},
- {"nilinterhash", funcTag, 111},
- {"int64div", funcTag, 112},
- {"uint64div", funcTag, 113},
- {"int64mod", funcTag, 112},
- {"uint64mod", funcTag, 113},
- {"float64toint64", funcTag, 114},
- {"float64touint64", funcTag, 115},
- {"float64touint32", funcTag, 116},
- {"int64tofloat64", funcTag, 117},
- {"uint64tofloat64", funcTag, 118},
- {"uint32tofloat64", funcTag, 119},
- {"complex128div", funcTag, 120},
- {"racefuncenter", funcTag, 31},
- {"racefuncenterfp", funcTag, 9},
- {"racefuncexit", funcTag, 9},
- {"raceread", funcTag, 31},
- {"racewrite", funcTag, 31},
- {"racereadrange", funcTag, 121},
- {"racewriterange", funcTag, 121},
- {"msanread", funcTag, 121},
- {"msanwrite", funcTag, 121},
- {"msanmove", funcTag, 122},
- {"checkptrAlignment", funcTag, 123},
- {"checkptrArithmetic", funcTag, 125},
- {"libfuzzerTraceCmp1", funcTag, 127},
- {"libfuzzerTraceCmp2", funcTag, 129},
- {"libfuzzerTraceCmp4", funcTag, 130},
- {"libfuzzerTraceCmp8", funcTag, 131},
- {"libfuzzerTraceConstCmp1", funcTag, 127},
- {"libfuzzerTraceConstCmp2", funcTag, 129},
- {"libfuzzerTraceConstCmp4", funcTag, 130},
- {"libfuzzerTraceConstCmp8", funcTag, 131},
- {"x86HasPOPCNT", varTag, 6},
- {"x86HasSSE41", varTag, 6},
- {"x86HasFMA", varTag, 6},
- {"armHasVFPv4", varTag, 6},
- {"arm64HasATOMICS", varTag, 6},
-}
-
-func runtimeTypes() []*types.Type {
- var typs [132]*types.Type
- typs[0] = types.ByteType
- typs[1] = types.NewPtr(typs[0])
- typs[2] = types.Types[types.TANY]
- typs[3] = types.NewPtr(typs[2])
- typs[4] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
- typs[5] = types.Types[types.TUINTPTR]
- typs[6] = types.Types[types.TBOOL]
- typs[7] = types.Types[types.TUNSAFEPTR]
- typs[8] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[6])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
- typs[9] = functype(nil, nil, nil)
- typs[10] = types.Types[types.TINTER]
- typs[11] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[10])}, nil)
- typs[12] = types.Types[types.TINT32]
- typs[13] = types.NewPtr(typs[12])
- typs[14] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[13])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[10])})
- typs[15] = types.Types[types.TINT]
- typs[16] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15])}, nil)
- typs[17] = types.Types[types.TUINT]
- typs[18] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[17]), ir.NewField(base.Pos, nil, nil, typs[15])}, nil)
- typs[19] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])}, nil)
- typs[20] = types.Types[types.TFLOAT64]
- typs[21] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, nil)
- typs[22] = types.Types[types.TINT64]
- typs[23] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])}, nil)
- typs[24] = types.Types[types.TUINT64]
- typs[25] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])}, nil)
- typs[26] = types.Types[types.TCOMPLEX128]
- typs[27] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26])}, nil)
- typs[28] = types.Types[types.TSTRING]
- typs[29] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])}, nil)
- typs[30] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])}, nil)
- typs[31] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
- typs[32] = types.NewArray(typs[0], 32)
- typs[33] = types.NewPtr(typs[32])
- typs[34] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
- typs[35] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
- typs[36] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
- typs[37] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
- typs[38] = types.NewSlice(typs[28])
- typs[39] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[38])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
- typs[40] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
- typs[41] = types.NewArray(typs[0], 4)
- typs[42] = types.NewPtr(typs[41])
- typs[43] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[42]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
- typs[44] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
- typs[45] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
- typs[46] = types.RuneType
- typs[47] = types.NewSlice(typs[46])
- typs[48] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[47])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
- typs[49] = types.NewSlice(typs[0])
- typs[50] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[49])})
- typs[51] = types.NewArray(typs[46], 32)
- typs[52] = types.NewPtr(typs[51])
- typs[53] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[52]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[47])})
- typs[54] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
- typs[55] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[46]), ir.NewField(base.Pos, nil, nil, typs[15])})
- typs[56] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
- typs[57] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])})
- typs[58] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
- typs[59] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])})
- typs[60] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2]), ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[61] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1])}, nil)
- typs[62] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1])}, nil)
- typs[63] = types.NewPtr(typs[5])
- typs[64] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[63]), ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[65] = types.Types[types.TUINT32]
- typs[66] = functype(nil, nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])})
- typs[67] = types.NewMap(typs[2], typs[2])
- typs[68] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
- typs[69] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
- typs[70] = functype(nil, nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
- typs[71] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
- typs[72] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
- typs[73] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
- typs[74] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[75] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[76] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[77] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
- typs[78] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, nil)
- typs[79] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
- typs[80] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67])}, nil)
- typs[81] = types.NewChan(typs[2], types.Cboth)
- typs[82] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[81])})
- typs[83] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[81])})
- typs[84] = types.NewChan(typs[2], types.Crecv)
- typs[85] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[84]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
- typs[86] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[84]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[87] = types.NewChan(typs[2], types.Csend)
- typs[88] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[87]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
- typs[89] = types.NewArray(typs[0], 3)
- typs[90] = tostruct([]*ir.Field{ir.NewField(base.Pos, lookup("enabled"), nil, typs[6]), ir.NewField(base.Pos, lookup("pad"), nil, typs[89]), ir.NewField(base.Pos, lookup("needed"), nil, typs[6]), ir.NewField(base.Pos, lookup("cgo"), nil, typs[6]), ir.NewField(base.Pos, lookup("alignme"), nil, typs[24])})
- typs[91] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
- typs[92] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
- typs[93] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
- typs[94] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[87]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[95] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[84])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[96] = types.NewPtr(typs[6])
- typs[97] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[96]), ir.NewField(base.Pos, nil, nil, typs[84])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[98] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[63])}, nil)
- typs[99] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[63]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[6])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[100] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
- typs[101] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
- typs[102] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
- typs[103] = types.NewSlice(typs[2])
- typs[104] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[103]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[103])})
- typs[105] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
- typs[106] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
- typs[107] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[108] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[109] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
- typs[110] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])})
- typs[111] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])})
- typs[112] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])})
- typs[113] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24]), ir.NewField(base.Pos, nil, nil, typs[24])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])})
- typs[114] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])})
- typs[115] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])})
- typs[116] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])})
- typs[117] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
- typs[118] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
- typs[119] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
- typs[120] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26]), ir.NewField(base.Pos, nil, nil, typs[26])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26])})
- typs[121] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
- typs[122] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
- typs[123] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
- typs[124] = types.NewSlice(typs[7])
- typs[125] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[124])}, nil)
- typs[126] = types.Types[types.TUINT8]
- typs[127] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[126]), ir.NewField(base.Pos, nil, nil, typs[126])}, nil)
- typs[128] = types.Types[types.TUINT16]
- typs[129] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[128]), ir.NewField(base.Pos, nil, nil, typs[128])}, nil)
- typs[130] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65]), ir.NewField(base.Pos, nil, nil, typs[65])}, nil)
- typs[131] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24]), ir.NewField(base.Pos, nil, nil, typs[24])}, nil)
- return typs[:]
-}
diff --git a/src/cmd/compile/internal/gc/builtin/runtime.go b/src/cmd/compile/internal/gc/builtin/runtime.go
deleted file mode 100644
index acb69c7b28..0000000000
--- a/src/cmd/compile/internal/gc/builtin/runtime.go
+++ /dev/null
@@ -1,259 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// NOTE: If you change this file you must run "go generate"
-// to update builtin.go. This is not done automatically
-// to avoid depending on having a working compiler binary.
-
-// +build ignore
-
-package runtime
-
-// emitted by compiler, not referred to by go programs
-
-import "unsafe"
-
-func newobject(typ *byte) *any
-func mallocgc(size uintptr, typ *byte, needszero bool) unsafe.Pointer
-func panicdivide()
-func panicshift()
-func panicmakeslicelen()
-func panicmakeslicecap()
-func throwinit()
-func panicwrap()
-
-func gopanic(interface{})
-func gorecover(*int32) interface{}
-func goschedguarded()
-
-// Note: these declarations are just for wasm port.
-// Other ports call assembly stubs instead.
-func goPanicIndex(x int, y int)
-func goPanicIndexU(x uint, y int)
-func goPanicSliceAlen(x int, y int)
-func goPanicSliceAlenU(x uint, y int)
-func goPanicSliceAcap(x int, y int)
-func goPanicSliceAcapU(x uint, y int)
-func goPanicSliceB(x int, y int)
-func goPanicSliceBU(x uint, y int)
-func goPanicSlice3Alen(x int, y int)
-func goPanicSlice3AlenU(x uint, y int)
-func goPanicSlice3Acap(x int, y int)
-func goPanicSlice3AcapU(x uint, y int)
-func goPanicSlice3B(x int, y int)
-func goPanicSlice3BU(x uint, y int)
-func goPanicSlice3C(x int, y int)
-func goPanicSlice3CU(x uint, y int)
-
-func printbool(bool)
-func printfloat(float64)
-func printint(int64)
-func printhex(uint64)
-func printuint(uint64)
-func printcomplex(complex128)
-func printstring(string)
-func printpointer(any)
-func printuintptr(uintptr)
-func printiface(any)
-func printeface(any)
-func printslice(any)
-func printnl()
-func printsp()
-func printlock()
-func printunlock()
-
-func concatstring2(*[32]byte, string, string) string
-func concatstring3(*[32]byte, string, string, string) string
-func concatstring4(*[32]byte, string, string, string, string) string
-func concatstring5(*[32]byte, string, string, string, string, string) string
-func concatstrings(*[32]byte, []string) string
-
-func cmpstring(string, string) int
-func intstring(*[4]byte, int64) string
-func slicebytetostring(buf *[32]byte, ptr *byte, n int) string
-func slicebytetostringtmp(ptr *byte, n int) string
-func slicerunetostring(*[32]byte, []rune) string
-func stringtoslicebyte(*[32]byte, string) []byte
-func stringtoslicerune(*[32]rune, string) []rune
-func slicecopy(toPtr *any, toLen int, fromPtr *any, fromLen int, wid uintptr) int
-
-func decoderune(string, int) (retv rune, retk int)
-func countrunes(string) int
-
-// Non-empty-interface to non-empty-interface conversion.
-func convI2I(typ *byte, elem any) (ret any)
-
-// Specialized type-to-interface conversion.
-// These return only a data pointer.
-func convT16(val any) unsafe.Pointer // val must be uint16-like (same size and alignment as a uint16)
-func convT32(val any) unsafe.Pointer // val must be uint32-like (same size and alignment as a uint32)
-func convT64(val any) unsafe.Pointer // val must be uint64-like (same size and alignment as a uint64 and contains no pointers)
-func convTstring(val any) unsafe.Pointer // val must be a string
-func convTslice(val any) unsafe.Pointer // val must be a slice
-
-// Type to empty-interface conversion.
-func convT2E(typ *byte, elem *any) (ret any)
-func convT2Enoptr(typ *byte, elem *any) (ret any)
-
-// Type to non-empty-interface conversion.
-func convT2I(tab *byte, elem *any) (ret any)
-func convT2Inoptr(tab *byte, elem *any) (ret any)
-
-// interface type assertions x.(T)
-func assertE2I(typ *byte, iface any) (ret any)
-func assertE2I2(typ *byte, iface any) (ret any, b bool)
-func assertI2I(typ *byte, iface any) (ret any)
-func assertI2I2(typ *byte, iface any) (ret any, b bool)
-func panicdottypeE(have, want, iface *byte)
-func panicdottypeI(have, want, iface *byte)
-func panicnildottype(want *byte)
-
-// interface equality. Type/itab pointers are already known to be equal, so
-// we only need to pass one.
-func ifaceeq(tab *uintptr, x, y unsafe.Pointer) (ret bool)
-func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool)
-
-func fastrand() uint32
-
-// *byte is really *runtime.Type
-func makemap64(mapType *byte, hint int64, mapbuf *any) (hmap map[any]any)
-func makemap(mapType *byte, hint int, mapbuf *any) (hmap map[any]any)
-func makemap_small() (hmap map[any]any)
-func mapaccess1(mapType *byte, hmap map[any]any, key *any) (val *any)
-func mapaccess1_fast32(mapType *byte, hmap map[any]any, key any) (val *any)
-func mapaccess1_fast64(mapType *byte, hmap map[any]any, key any) (val *any)
-func mapaccess1_faststr(mapType *byte, hmap map[any]any, key any) (val *any)
-func mapaccess1_fat(mapType *byte, hmap map[any]any, key *any, zero *byte) (val *any)
-func mapaccess2(mapType *byte, hmap map[any]any, key *any) (val *any, pres bool)
-func mapaccess2_fast32(mapType *byte, hmap map[any]any, key any) (val *any, pres bool)
-func mapaccess2_fast64(mapType *byte, hmap map[any]any, key any) (val *any, pres bool)
-func mapaccess2_faststr(mapType *byte, hmap map[any]any, key any) (val *any, pres bool)
-func mapaccess2_fat(mapType *byte, hmap map[any]any, key *any, zero *byte) (val *any, pres bool)
-func mapassign(mapType *byte, hmap map[any]any, key *any) (val *any)
-func mapassign_fast32(mapType *byte, hmap map[any]any, key any) (val *any)
-func mapassign_fast32ptr(mapType *byte, hmap map[any]any, key any) (val *any)
-func mapassign_fast64(mapType *byte, hmap map[any]any, key any) (val *any)
-func mapassign_fast64ptr(mapType *byte, hmap map[any]any, key any) (val *any)
-func mapassign_faststr(mapType *byte, hmap map[any]any, key any) (val *any)
-func mapiterinit(mapType *byte, hmap map[any]any, hiter *any)
-func mapdelete(mapType *byte, hmap map[any]any, key *any)
-func mapdelete_fast32(mapType *byte, hmap map[any]any, key any)
-func mapdelete_fast64(mapType *byte, hmap map[any]any, key any)
-func mapdelete_faststr(mapType *byte, hmap map[any]any, key any)
-func mapiternext(hiter *any)
-func mapclear(mapType *byte, hmap map[any]any)
-
-// *byte is really *runtime.Type
-func makechan64(chanType *byte, size int64) (hchan chan any)
-func makechan(chanType *byte, size int) (hchan chan any)
-func chanrecv1(hchan <-chan any, elem *any)
-func chanrecv2(hchan <-chan any, elem *any) bool
-func chansend1(hchan chan<- any, elem *any)
-func closechan(hchan any)
-
-var writeBarrier struct {
- enabled bool
- pad [3]byte
- needed bool
- cgo bool
- alignme uint64
-}
-
-// *byte is really *runtime.Type
-func typedmemmove(typ *byte, dst *any, src *any)
-func typedmemclr(typ *byte, dst *any)
-func typedslicecopy(typ *byte, dstPtr *any, dstLen int, srcPtr *any, srcLen int) int
-
-func selectnbsend(hchan chan<- any, elem *any) bool
-func selectnbrecv(elem *any, hchan <-chan any) bool
-func selectnbrecv2(elem *any, received *bool, hchan <-chan any) bool
-
-func selectsetpc(pc *uintptr)
-func selectgo(cas0 *byte, order0 *byte, pc0 *uintptr, nsends int, nrecvs int, block bool) (int, bool)
-func block()
-
-func makeslice(typ *byte, len int, cap int) unsafe.Pointer
-func makeslice64(typ *byte, len int64, cap int64) unsafe.Pointer
-func makeslicecopy(typ *byte, tolen int, fromlen int, from unsafe.Pointer) unsafe.Pointer
-func growslice(typ *byte, old []any, cap int) (ary []any)
-func memmove(to *any, frm *any, length uintptr)
-func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
-func memclrHasPointers(ptr unsafe.Pointer, n uintptr)
-
-func memequal(x, y *any, size uintptr) bool
-func memequal0(x, y *any) bool
-func memequal8(x, y *any) bool
-func memequal16(x, y *any) bool
-func memequal32(x, y *any) bool
-func memequal64(x, y *any) bool
-func memequal128(x, y *any) bool
-func f32equal(p, q unsafe.Pointer) bool
-func f64equal(p, q unsafe.Pointer) bool
-func c64equal(p, q unsafe.Pointer) bool
-func c128equal(p, q unsafe.Pointer) bool
-func strequal(p, q unsafe.Pointer) bool
-func interequal(p, q unsafe.Pointer) bool
-func nilinterequal(p, q unsafe.Pointer) bool
-
-func memhash(p unsafe.Pointer, h uintptr, size uintptr) uintptr
-func memhash0(p unsafe.Pointer, h uintptr) uintptr
-func memhash8(p unsafe.Pointer, h uintptr) uintptr
-func memhash16(p unsafe.Pointer, h uintptr) uintptr
-func memhash32(p unsafe.Pointer, h uintptr) uintptr
-func memhash64(p unsafe.Pointer, h uintptr) uintptr
-func memhash128(p unsafe.Pointer, h uintptr) uintptr
-func f32hash(p unsafe.Pointer, h uintptr) uintptr
-func f64hash(p unsafe.Pointer, h uintptr) uintptr
-func c64hash(p unsafe.Pointer, h uintptr) uintptr
-func c128hash(p unsafe.Pointer, h uintptr) uintptr
-func strhash(a unsafe.Pointer, h uintptr) uintptr
-func interhash(p unsafe.Pointer, h uintptr) uintptr
-func nilinterhash(p unsafe.Pointer, h uintptr) uintptr
-
-// only used on 32-bit
-func int64div(int64, int64) int64
-func uint64div(uint64, uint64) uint64
-func int64mod(int64, int64) int64
-func uint64mod(uint64, uint64) uint64
-func float64toint64(float64) int64
-func float64touint64(float64) uint64
-func float64touint32(float64) uint32
-func int64tofloat64(int64) float64
-func uint64tofloat64(uint64) float64
-func uint32tofloat64(uint32) float64
-
-func complex128div(num complex128, den complex128) (quo complex128)
-
-// race detection
-func racefuncenter(uintptr)
-func racefuncenterfp()
-func racefuncexit()
-func raceread(uintptr)
-func racewrite(uintptr)
-func racereadrange(addr, size uintptr)
-func racewriterange(addr, size uintptr)
-
-// memory sanitizer
-func msanread(addr, size uintptr)
-func msanwrite(addr, size uintptr)
-func msanmove(dst, src, size uintptr)
-
-func checkptrAlignment(unsafe.Pointer, *byte, uintptr)
-func checkptrArithmetic(unsafe.Pointer, []unsafe.Pointer)
-
-func libfuzzerTraceCmp1(uint8, uint8)
-func libfuzzerTraceCmp2(uint16, uint16)
-func libfuzzerTraceCmp4(uint32, uint32)
-func libfuzzerTraceCmp8(uint64, uint64)
-func libfuzzerTraceConstCmp1(uint8, uint8)
-func libfuzzerTraceConstCmp2(uint16, uint16)
-func libfuzzerTraceConstCmp4(uint32, uint32)
-func libfuzzerTraceConstCmp8(uint64, uint64)
-
-// architecture variants
-var x86HasPOPCNT bool
-var x86HasSSE41 bool
-var x86HasFMA bool
-var armHasVFPv4 bool
-var arm64HasATOMICS bool
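The declarations deleted above are signature stubs, not implementations: they give the compiler the types of the runtime helpers it emits calls to when lowering map, channel, interface, and slice operations. As a rough illustration (not part of this CL), the sketch below annotates ordinary map code with the helpers it is typically lowered to; the exact variants chosen (for example the _faststr forms) depend on the key type and on optimization decisions, so the comments are best-effort guesses.

package main

import "fmt"

func main() {
    m := make(map[string]int, 4) // lowered to makemap (or makemap_small for tiny hints)
    m["answer"] = 42             // mapassign_faststr for string keys
    v, ok := m["answer"]         // mapaccess2_faststr
    fmt.Println(v, ok)
    delete(m, "answer") // mapdelete_faststr
    for k, n := range m { // mapiterinit + mapiternext drive the loop
        fmt.Println(k, n)
    }
}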
diff --git a/src/cmd/compile/internal/gc/builtin_test.go b/src/cmd/compile/internal/gc/builtin_test.go
deleted file mode 100644
index df15ca5c7d..0000000000
--- a/src/cmd/compile/internal/gc/builtin_test.go
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package gc_test
-
-import (
- "bytes"
- "internal/testenv"
- "io/ioutil"
- "os/exec"
- "testing"
-)
-
-func TestBuiltin(t *testing.T) {
- t.Skip("mkbuiltin needs fixing")
- testenv.MustHaveGoRun(t)
- t.Parallel()
-
- old, err := ioutil.ReadFile("builtin.go")
- if err != nil {
- t.Fatal(err)
- }
-
- new, err := exec.Command(testenv.GoToolPath(t), "run", "mkbuiltin.go", "-stdout").Output()
- if err != nil {
- t.Fatal(err)
- }
-
- if !bytes.Equal(old, new) {
- t.Fatal("builtin.go out of date; run mkbuiltin.go")
- }
-}
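The deleted test above is a golden-file check: regenerate builtin.go and compare it byte-for-byte with what is committed. The same pattern, written generically and with placeholder names (generated.go and mkgen.go are not part of this CL), looks like this:

package gen_test

import (
    "bytes"
    "os"
    "os/exec"
    "testing"
)

func TestGeneratedFileUpToDate(t *testing.T) {
    want, err := os.ReadFile("generated.go")
    if err != nil {
        t.Fatal(err)
    }
    got, err := exec.Command("go", "run", "mkgen.go", "-stdout").Output()
    if err != nil {
        t.Fatal(err)
    }
    if !bytes.Equal(want, got) {
        t.Fatal("generated.go is stale; rerun: go run mkgen.go")
    }
}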
diff --git a/src/cmd/compile/internal/gc/closure.go b/src/cmd/compile/internal/gc/closure.go
index 454d97e17f..29455bffd8 100644
--- a/src/cmd/compile/internal/gc/closure.go
+++ b/src/cmd/compile/internal/gc/closure.go
@@ -8,9 +8,9 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
- "fmt"
)
func (p *noder) funcLit(expr *syntax.FuncLit) ir.Node {
@@ -72,156 +72,6 @@ func (p *noder) funcLit(expr *syntax.FuncLit) ir.Node {
return clo
}
-// typecheckclosure typechecks an OCLOSURE node. It also creates the named
-// function associated with the closure.
-// TODO: This creation of the named function should probably really be done in a
-// separate pass from type-checking.
-func typecheckclosure(clo *ir.ClosureExpr, top int) {
- fn := clo.Func
- // Set current associated iota value, so iota can be used inside
- // function in ConstSpec, see issue #22344
- if x := getIotaValue(); x >= 0 {
- fn.Iota = x
- }
-
- fn.ClosureType = typecheck(fn.ClosureType, ctxType)
- clo.SetType(fn.ClosureType.Type())
- fn.SetClosureCalled(top&ctxCallee != 0)
-
- // Do not typecheck fn twice; otherwise, we will end up pushing
- // fn to Target.Decls multiple times, causing initLSym to be called twice.
- // See #30709
- if fn.Typecheck() == 1 {
- return
- }
-
- for _, ln := range fn.ClosureVars {
- n := ln.Defn
- if !n.Name().Captured() {
- n.Name().SetCaptured(true)
- if n.Name().Decldepth == 0 {
- base.Fatalf("typecheckclosure: var %v does not have decldepth assigned", n)
- }
-
- // Ignore assignments to the variable in straightline code
- // preceding the first capturing by a closure.
- if n.Name().Decldepth == decldepth {
- n.Name().SetAssigned(false)
- }
- }
- }
-
- fn.Nname.SetSym(closurename(ir.CurFunc))
- ir.MarkFunc(fn.Nname)
- typecheckFunc(fn)
-
- // Type check the body now, but only if we're inside a function.
- // At top level (in a variable initialization: curfn==nil) we're not
- // ready to type check code yet; we'll check it later, because the
- // underlying closure function we create is added to Target.Decls.
- if ir.CurFunc != nil && clo.Type() != nil {
- oldfn := ir.CurFunc
- ir.CurFunc = fn
- olddd := decldepth
- decldepth = 1
- typecheckslice(fn.Body, ctxStmt)
- decldepth = olddd
- ir.CurFunc = oldfn
- }
-
- Target.Decls = append(Target.Decls, fn)
-}
-
-// globClosgen is like Func.Closgen, but for the global scope.
-var globClosgen int32
-
-// closurename generates a new unique name for a closure within
-// outerfunc.
-func closurename(outerfunc *ir.Func) *types.Sym {
- outer := "glob."
- prefix := "func"
- gen := &globClosgen
-
- if outerfunc != nil {
- if outerfunc.OClosure != nil {
- prefix = ""
- }
-
- outer = ir.FuncName(outerfunc)
-
- // There may be multiple functions named "_". In those
- // cases, we can't use their individual Closgens as it
- // would lead to name clashes.
- if !ir.IsBlank(outerfunc.Nname) {
- gen = &outerfunc.Closgen
- }
- }
-
- *gen++
- return lookup(fmt.Sprintf("%s.%s%d", outer, prefix, *gen))
-}
-
-// capturevarscomplete is set to true when the capturevars phase is done.
-var capturevarscomplete bool
-
-// capturevars is called in a separate phase after all typechecking is done.
-// It decides whether each variable captured by a closure should be captured
-// by value or by reference.
-// We use value capturing for values <= 128 bytes that are never reassigned
-// after capturing (effectively constant).
-func capturevars(fn *ir.Func) {
- lno := base.Pos
- base.Pos = fn.Pos()
- cvars := fn.ClosureVars
- out := cvars[:0]
- for _, v := range cvars {
- if v.Type() == nil {
- // If v.Type is nil, it means v looked like it
- // was going to be used in the closure, but
- // isn't. This happens in struct literals like
- // s{f: x} where we can't distinguish whether
- // f is a field identifier or expression until
- // resolving s.
- continue
- }
- out = append(out, v)
-
- // type check the & of closed variables outside the closure,
- // so that the outer frame also grabs them and knows they escape.
- types.CalcSize(v.Type())
-
- var outer ir.Node
- outer = v.Outer
- outermost := v.Defn.(*ir.Name)
-
- // out parameters will be assigned to implicitly upon return.
- if outermost.Class_ != ir.PPARAMOUT && !outermost.Name().Addrtaken() && !outermost.Name().Assigned() && v.Type().Width <= 128 {
- v.SetByval(true)
- } else {
- outermost.Name().SetAddrtaken(true)
- outer = nodAddr(outer)
- }
-
- if base.Flag.LowerM > 1 {
- var name *types.Sym
- if v.Curfn != nil && v.Curfn.Nname != nil {
- name = v.Curfn.Sym()
- }
- how := "ref"
- if v.Byval() {
- how = "value"
- }
- base.WarnfAt(v.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym(), outermost.Name().Addrtaken(), outermost.Name().Assigned(), int32(v.Type().Width))
- }
-
- outer = typecheck(outer, ctxExpr)
- fn.ClosureEnter.Append(outer)
- }
-
- fn.ClosureVars = out
- base.Pos = lno
-}
-
// transformclosure is called in a separate phase after escape analysis.
// It transforms closure bodies to properly reference captured variables.
func transformclosure(fn *ir.Func) {
@@ -256,7 +106,7 @@ func transformclosure(fn *ir.Func) {
// we introduce function param &v *T
// and v remains PAUTOHEAP with &v heapaddr
// (accesses will implicitly deref &v).
- addr := NewName(lookup("&" + v.Sym().Name))
+ addr := typecheck.NewName(typecheck.Lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
v.Heapaddr = addr
v = addr
@@ -300,7 +150,7 @@ func transformclosure(fn *ir.Func) {
} else {
// Declare variable holding addresses taken from closure
// and initialize in entry prologue.
- addr := NewName(lookup("&" + v.Sym().Name))
+ addr := typecheck.NewName(typecheck.Lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
addr.Class_ = ir.PAUTO
addr.SetUsed(true)
@@ -309,14 +159,14 @@ func transformclosure(fn *ir.Func) {
v.Heapaddr = addr
var src ir.Node = cr
if v.Byval() {
- src = nodAddr(cr)
+ src = typecheck.NodAddr(cr)
}
body = append(body, ir.NewAssignStmt(base.Pos, addr, src))
}
}
if len(body) > 0 {
- typecheckslice(body, ctxStmt)
+ typecheck.Stmts(body)
fn.Enter.Set(body)
fn.SetNeedctxt(true)
}
@@ -346,38 +196,6 @@ func closuredebugruntimecheck(clo *ir.ClosureExpr) {
}
}
-// closureType returns the struct type used to hold all the information
-// needed in the closure for clo (clo must be a OCLOSURE node).
-// The address of a variable of the returned type can be cast to a func.
-func closureType(clo *ir.ClosureExpr) *types.Type {
- // Create closure in the form of a composite literal.
- // supposing the closure captures an int i and a string s
- // and has one float64 argument and no results,
- // the generated code looks like:
- //
- // clos = &struct{.F uintptr; i *int; s *string}{func.1, &i, &s}
- //
- // The use of the struct provides type information to the garbage
- // collector so that it can walk the closure. We could use (in this case)
- // [3]unsafe.Pointer instead, but that would leave the gc in the dark.
- // The information appears in the binary in the form of type descriptors;
- // the struct is unnamed so that closures in multiple packages with the
- // same struct type can share the descriptor.
- fields := []*ir.Field{
- ir.NewField(base.Pos, lookup(".F"), nil, types.Types[types.TUINTPTR]),
- }
- for _, v := range clo.Func.ClosureVars {
- typ := v.Type()
- if !v.Byval() {
- typ = types.NewPtr(typ)
- }
- fields = append(fields, ir.NewField(base.Pos, v.Sym(), nil, typ))
- }
- typ := tostruct(fields)
- typ.SetNoalg(true)
- return typ
-}
-
func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
fn := clo.Func
@@ -390,17 +208,17 @@ func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
}
closuredebugruntimecheck(clo)
- typ := closureType(clo)
+ typ := typecheck.ClosureType(clo)
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
clos.SetEsc(clo.Esc())
clos.List.Set(append([]ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, fn.Nname)}, fn.ClosureEnter...))
- addr := nodAddr(clos)
+ addr := typecheck.NodAddr(clos)
addr.SetEsc(clo.Esc())
// Force type conversion from *struct to the func type.
- cfn := convnop(addr, clo.Type())
+ cfn := typecheck.ConvNop(addr, clo.Type())
// non-escaping temp to use, if any.
if x := clo.Prealloc; x != nil {
@@ -414,110 +232,6 @@ func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
return walkexpr(cfn, init)
}
-func typecheckpartialcall(n ir.Node, sym *types.Sym) *ir.CallPartExpr {
- switch n.Op() {
- case ir.ODOTINTER, ir.ODOTMETH:
- break
-
- default:
- base.Fatalf("invalid typecheckpartialcall")
- }
- dot := n.(*ir.SelectorExpr)
-
- // Create top-level function.
- fn := makepartialcall(dot, dot.Type(), sym)
- fn.SetWrapper(true)
-
- return ir.NewCallPartExpr(dot.Pos(), dot.X, dot.Selection, fn)
-}
-
-// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
-// for partial calls.
-func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.Func {
- rcvrtype := dot.X.Type()
- sym := ir.MethodSymSuffix(rcvrtype, meth, "-fm")
-
- if sym.Uniq() {
- return sym.Def.(*ir.Func)
- }
- sym.SetUniq(true)
-
- savecurfn := ir.CurFunc
- saveLineNo := base.Pos
- ir.CurFunc = nil
-
- // Set line number equal to the line number where the method is declared.
- var m *types.Field
- if lookdot0(meth, rcvrtype, &m, false) == 1 && m.Pos.IsKnown() {
- base.Pos = m.Pos
- }
- // Note: !m.Pos.IsKnown() happens for method expressions where
- // the method is implicitly declared. The Error method of the
- // built-in error type is one such method. We leave the line
- // number at the use of the method expression in this
- // case. See issue 29389.
-
- tfn := ir.NewFuncType(base.Pos, nil,
- structargs(t0.Params(), true),
- structargs(t0.Results(), false))
-
- fn := dclfunc(sym, tfn)
- fn.SetDupok(true)
- fn.SetNeedctxt(true)
-
- // Declare and initialize variable holding receiver.
- cr := ir.NewClosureRead(rcvrtype, types.Rnd(int64(types.PtrSize), int64(rcvrtype.Align)))
- ptr := NewName(lookup(".this"))
- declare(ptr, ir.PAUTO)
- ptr.SetUsed(true)
- var body []ir.Node
- if rcvrtype.IsPtr() || rcvrtype.IsInterface() {
- ptr.SetType(rcvrtype)
- body = append(body, ir.NewAssignStmt(base.Pos, ptr, cr))
- } else {
- ptr.SetType(types.NewPtr(rcvrtype))
- body = append(body, ir.NewAssignStmt(base.Pos, ptr, nodAddr(cr)))
- }
-
- call := ir.NewCallExpr(base.Pos, ir.OCALL, ir.NewSelectorExpr(base.Pos, ir.OXDOT, ptr, meth), nil)
- call.Args.Set(ir.ParamNames(tfn.Type()))
- call.IsDDD = tfn.Type().IsVariadic()
- if t0.NumResults() != 0 {
- ret := ir.NewReturnStmt(base.Pos, nil)
- ret.Results = []ir.Node{call}
- body = append(body, ret)
- } else {
- body = append(body, call)
- }
-
- fn.Body.Set(body)
- funcbody()
-
- typecheckFunc(fn)
- // Need to typecheck the body of the just-generated wrapper.
- // typecheckslice() requires that Curfn is set when processing an ORETURN.
- ir.CurFunc = fn
- typecheckslice(fn.Body, ctxStmt)
- sym.Def = fn
- Target.Decls = append(Target.Decls, fn)
- ir.CurFunc = savecurfn
- base.Pos = saveLineNo
-
- return fn
-}
-
-// partialCallType returns the struct type used to hold all the information
-// needed in the closure for n (n must be a OCALLPART node).
-// The address of a variable of the returned type can be cast to a func.
-func partialCallType(n *ir.CallPartExpr) *types.Type {
- t := tostruct([]*ir.Field{
- ir.NewField(base.Pos, lookup("F"), nil, types.Types[types.TUINTPTR]),
- ir.NewField(base.Pos, lookup("R"), nil, n.X.Type()),
- })
- t.SetNoalg(true)
- return t
-}
-
func walkpartialcall(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
// Create closure in the form of a composite literal.
// For x.M with receiver (x) type T, the generated code looks like:
@@ -532,24 +246,24 @@ func walkpartialcall(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
n.X = cheapexpr(n.X, init)
n.X = walkexpr(n.X, nil)
- tab := typecheck(ir.NewUnaryExpr(base.Pos, ir.OITAB, n.X), ctxExpr)
+ tab := typecheck.Expr(ir.NewUnaryExpr(base.Pos, ir.OITAB, n.X))
c := ir.NewUnaryExpr(base.Pos, ir.OCHECKNIL, tab)
c.SetTypecheck(1)
init.Append(c)
}
- typ := partialCallType(n)
+ typ := typecheck.PartialCallType(n)
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
clos.SetEsc(n.Esc())
clos.List = []ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, n.Func.Nname), n.X}
- addr := nodAddr(clos)
+ addr := typecheck.NodAddr(clos)
addr.SetEsc(n.Esc())
// Force type conversion from *struct to the func type.
- cfn := convnop(addr, n.Type())
+ cfn := typecheck.ConvNop(addr, n.Type())
// non-escaping temp to use, if any.
if x := n.Prealloc; x != nil {
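For context on the decision made by the deleted capturevars (values of at most 128 bytes that are never reassigned after capture may be copied into the closure), here is a small, hedged illustration of the source-level semantics that decision must preserve; it is an example program, not compiler code.

package main

import "fmt"

func main() {
    x := 1
    readX := func() int { return x }
    x = 2                // reassigned after capture: must behave as shared storage (by reference)
    fmt.Println(readX()) // prints 2, observing the shared x

    y := 10
    readY := func() int { return y } // y is never reassigned ("effectively constant"),
    fmt.Println(readY())             // so copying it into the closure is indistinguishable
}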
diff --git a/src/cmd/compile/internal/gc/const.go b/src/cmd/compile/internal/gc/const.go
deleted file mode 100644
index ad27f3ea44..0000000000
--- a/src/cmd/compile/internal/gc/const.go
+++ /dev/null
@@ -1,864 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package gc
-
-import (
- "cmd/compile/internal/base"
- "cmd/compile/internal/ir"
- "cmd/compile/internal/types"
- "cmd/internal/src"
- "fmt"
- "go/constant"
- "go/token"
- "math"
- "math/big"
- "strings"
- "unicode"
-)
-
-func roundFloat(v constant.Value, sz int64) constant.Value {
- switch sz {
- case 4:
- f, _ := constant.Float32Val(v)
- return makeFloat64(float64(f))
- case 8:
- f, _ := constant.Float64Val(v)
- return makeFloat64(f)
- }
- base.Fatalf("unexpected size: %v", sz)
- panic("unreachable")
-}
-
-// truncate float literal fv to 32-bit or 64-bit precision
-// according to type; return truncated value.
-func truncfltlit(v constant.Value, t *types.Type) constant.Value {
- if t.IsUntyped() || overflow(v, t) {
- // If there was overflow, simply continuing would set the
- // value to Inf which in turn would lead to spurious follow-on
- // errors. Avoid this by returning the existing value.
- return v
- }
-
- return roundFloat(v, t.Size())
-}
-
-// truncate Real and Imag parts of Mpcplx to 32-bit or 64-bit
-// precision, according to type; return truncated value. In case of
-// overflow, calls Errorf but does not truncate the input value.
-func trunccmplxlit(v constant.Value, t *types.Type) constant.Value {
- if t.IsUntyped() || overflow(v, t) {
- // If there was overflow, simply continuing would set the
- // value to Inf which in turn would lead to spurious follow-on
- // errors. Avoid this by returning the existing value.
- return v
- }
-
- fsz := t.Size() / 2
- return makeComplex(roundFloat(constant.Real(v), fsz), roundFloat(constant.Imag(v), fsz))
-}
-
-// TODO(mdempsky): Replace these with better APIs.
-func convlit(n ir.Node, t *types.Type) ir.Node { return convlit1(n, t, false, nil) }
-func defaultlit(n ir.Node, t *types.Type) ir.Node { return convlit1(n, t, false, nil) }
-
-// convlit1 converts an untyped expression n to type t. If n already
-// has a type, convlit1 has no effect.
-//
-// For explicit conversions, t must be non-nil, and integer-to-string
-// conversions are allowed.
-//
-// For implicit conversions (e.g., assignments), t may be nil; if so,
-// n is converted to its default type.
-//
-// If there's an error converting n to t, context is used in the error
-// message.
-func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir.Node {
- if explicit && t == nil {
- base.Fatalf("explicit conversion missing type")
- }
- if t != nil && t.IsUntyped() {
- base.Fatalf("bad conversion to untyped: %v", t)
- }
-
- if n == nil || n.Type() == nil {
- // Allow sloppy callers.
- return n
- }
- if !n.Type().IsUntyped() {
- // Already typed; nothing to do.
- return n
- }
-
- // Nil is technically not a constant, so handle it specially.
- if n.Type().Kind() == types.TNIL {
- if n.Op() != ir.ONIL {
- base.Fatalf("unexpected op: %v (%v)", n, n.Op())
- }
- n = ir.Copy(n)
- if t == nil {
- base.Errorf("use of untyped nil")
- n.SetDiag(true)
- n.SetType(nil)
- return n
- }
-
- if !t.HasNil() {
- // Leave for caller to handle.
- return n
- }
-
- n.SetType(t)
- return n
- }
-
- if t == nil || !ir.OKForConst[t.Kind()] {
- t = defaultType(n.Type())
- }
-
- switch n.Op() {
- default:
- base.Fatalf("unexpected untyped expression: %v", n)
-
- case ir.OLITERAL:
- v := convertVal(n.Val(), t, explicit)
- if v.Kind() == constant.Unknown {
- n = ir.NewConstExpr(n.Val(), n)
- break
- }
- n = ir.NewConstExpr(v, n)
- n.SetType(t)
- return n
-
- case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.OREAL, ir.OIMAG:
- ot := operandType(n.Op(), t)
- if ot == nil {
- n = defaultlit(n, nil)
- break
- }
-
- n := n.(*ir.UnaryExpr)
- n.X = convlit(n.X, ot)
- if n.X.Type() == nil {
- n.SetType(nil)
- return n
- }
- n.SetType(t)
- return n
-
- case ir.OADD, ir.OSUB, ir.OMUL, ir.ODIV, ir.OMOD, ir.OOR, ir.OXOR, ir.OAND, ir.OANDNOT, ir.OOROR, ir.OANDAND, ir.OCOMPLEX:
- ot := operandType(n.Op(), t)
- if ot == nil {
- n = defaultlit(n, nil)
- break
- }
-
- var l, r ir.Node
- switch n := n.(type) {
- case *ir.BinaryExpr:
- n.X = convlit(n.X, ot)
- n.Y = convlit(n.Y, ot)
- l, r = n.X, n.Y
- case *ir.LogicalExpr:
- n.X = convlit(n.X, ot)
- n.Y = convlit(n.Y, ot)
- l, r = n.X, n.Y
- }
-
- if l.Type() == nil || r.Type() == nil {
- n.SetType(nil)
- return n
- }
- if !types.Identical(l.Type(), r.Type()) {
- base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, l.Type(), r.Type())
- n.SetType(nil)
- return n
- }
-
- n.SetType(t)
- return n
-
- case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
- n := n.(*ir.BinaryExpr)
- if !t.IsBoolean() {
- break
- }
- n.SetType(t)
- return n
-
- case ir.OLSH, ir.ORSH:
- n := n.(*ir.BinaryExpr)
- n.X = convlit1(n.X, t, explicit, nil)
- n.SetType(n.X.Type())
- if n.Type() != nil && !n.Type().IsInteger() {
- base.Errorf("invalid operation: %v (shift of type %v)", n, n.Type())
- n.SetType(nil)
- }
- return n
- }
-
- if !n.Diag() {
- if !t.Broke() {
- if explicit {
- base.Errorf("cannot convert %L to type %v", n, t)
- } else if context != nil {
- base.Errorf("cannot use %L as type %v in %s", n, t, context())
- } else {
- base.Errorf("cannot use %L as type %v", n, t)
- }
- }
- n.SetDiag(true)
- }
- n.SetType(nil)
- return n
-}
-
-func operandType(op ir.Op, t *types.Type) *types.Type {
- switch op {
- case ir.OCOMPLEX:
- if t.IsComplex() {
- return types.FloatForComplex(t)
- }
- case ir.OREAL, ir.OIMAG:
- if t.IsFloat() {
- return types.ComplexForFloat(t)
- }
- default:
- if okfor[op][t.Kind()] {
- return t
- }
- }
- return nil
-}
-
-// convertVal converts v into a representation appropriate for t. If
-// no such representation exists, it returns Val{} instead.
-//
-// If explicit is true, then conversions from integer to string are
-// also allowed.
-func convertVal(v constant.Value, t *types.Type, explicit bool) constant.Value {
- switch ct := v.Kind(); ct {
- case constant.Bool:
- if t.IsBoolean() {
- return v
- }
-
- case constant.String:
- if t.IsString() {
- return v
- }
-
- case constant.Int:
- if explicit && t.IsString() {
- return tostr(v)
- }
- fallthrough
- case constant.Float, constant.Complex:
- switch {
- case t.IsInteger():
- v = toint(v)
- overflow(v, t)
- return v
- case t.IsFloat():
- v = toflt(v)
- v = truncfltlit(v, t)
- return v
- case t.IsComplex():
- v = tocplx(v)
- v = trunccmplxlit(v, t)
- return v
- }
- }
-
- return constant.MakeUnknown()
-}
-
-func tocplx(v constant.Value) constant.Value {
- return constant.ToComplex(v)
-}
-
-func toflt(v constant.Value) constant.Value {
- if v.Kind() == constant.Complex {
- if constant.Sign(constant.Imag(v)) != 0 {
- base.Errorf("constant %v truncated to real", v)
- }
- v = constant.Real(v)
- }
-
- return constant.ToFloat(v)
-}
-
-func toint(v constant.Value) constant.Value {
- if v.Kind() == constant.Complex {
- if constant.Sign(constant.Imag(v)) != 0 {
- base.Errorf("constant %v truncated to integer", v)
- }
- v = constant.Real(v)
- }
-
- if v := constant.ToInt(v); v.Kind() == constant.Int {
- return v
- }
-
- // The value of v cannot be represented as an integer;
- // so we need to print an error message.
- // Unfortunately some float values cannot be
- // reasonably formatted for inclusion in an error
- // message (example: 1 + 1e-100), so first we try to
- // format the float; if the truncation resulted in
- // something that looks like an integer we omit the
- // value from the error message.
- // (See issue #11371).
- f := ir.BigFloat(v)
- if f.MantExp(nil) > 2*ir.ConstPrec {
- base.Errorf("integer too large")
- } else {
- var t big.Float
- t.Parse(fmt.Sprint(v), 0)
- if t.IsInt() {
- base.Errorf("constant truncated to integer")
- } else {
- base.Errorf("constant %v truncated to integer", v)
- }
- }
-
- // Prevent follow-on errors.
- // TODO(mdempsky): Use constant.MakeUnknown() instead.
- return constant.MakeInt64(1)
-}
-
-// overflow reports whether constant value v is too large
-// to represent with type t, and emits an error message if so.
-func overflow(v constant.Value, t *types.Type) bool {
- // v has already been converted
- // to appropriate form for t.
- if t.IsUntyped() {
- return false
- }
- if v.Kind() == constant.Int && constant.BitLen(v) > ir.ConstPrec {
- base.Errorf("integer too large")
- return true
- }
- if ir.ConstOverflow(v, t) {
- base.Errorf("constant %v overflows %v", types.FmtConst(v, false), t)
- return true
- }
- return false
-}
-
-func tostr(v constant.Value) constant.Value {
- if v.Kind() == constant.Int {
- r := unicode.ReplacementChar
- if x, ok := constant.Uint64Val(v); ok && x <= unicode.MaxRune {
- r = rune(x)
- }
- v = constant.MakeString(string(r))
- }
- return v
-}
-
-var tokenForOp = [...]token.Token{
- ir.OPLUS: token.ADD,
- ir.ONEG: token.SUB,
- ir.ONOT: token.NOT,
- ir.OBITNOT: token.XOR,
-
- ir.OADD: token.ADD,
- ir.OSUB: token.SUB,
- ir.OMUL: token.MUL,
- ir.ODIV: token.QUO,
- ir.OMOD: token.REM,
- ir.OOR: token.OR,
- ir.OXOR: token.XOR,
- ir.OAND: token.AND,
- ir.OANDNOT: token.AND_NOT,
- ir.OOROR: token.LOR,
- ir.OANDAND: token.LAND,
-
- ir.OEQ: token.EQL,
- ir.ONE: token.NEQ,
- ir.OLT: token.LSS,
- ir.OLE: token.LEQ,
- ir.OGT: token.GTR,
- ir.OGE: token.GEQ,
-
- ir.OLSH: token.SHL,
- ir.ORSH: token.SHR,
-}
-
-// evalConst returns a constant-evaluated expression equivalent to n.
-// If n is not a constant, evalConst returns n.
-// Otherwise, evalConst returns a new OLITERAL with the same value as n,
-// and with .Orig pointing back to n.
-func evalConst(n ir.Node) ir.Node {
- // Pick off just the opcodes that can be constant evaluated.
- switch n.Op() {
- case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT:
- n := n.(*ir.UnaryExpr)
- nl := n.X
- if nl.Op() == ir.OLITERAL {
- var prec uint
- if n.Type().IsUnsigned() {
- prec = uint(n.Type().Size() * 8)
- }
- return origConst(n, constant.UnaryOp(tokenForOp[n.Op()], nl.Val(), prec))
- }
-
- case ir.OADD, ir.OSUB, ir.OMUL, ir.ODIV, ir.OMOD, ir.OOR, ir.OXOR, ir.OAND, ir.OANDNOT:
- n := n.(*ir.BinaryExpr)
- nl, nr := n.X, n.Y
- if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
- rval := nr.Val()
-
- // check for divisor underflow in complex division (see issue 20227)
- if n.Op() == ir.ODIV && n.Type().IsComplex() && constant.Sign(square(constant.Real(rval))) == 0 && constant.Sign(square(constant.Imag(rval))) == 0 {
- base.Errorf("complex division by zero")
- n.SetType(nil)
- return n
- }
- if (n.Op() == ir.ODIV || n.Op() == ir.OMOD) && constant.Sign(rval) == 0 {
- base.Errorf("division by zero")
- n.SetType(nil)
- return n
- }
-
- tok := tokenForOp[n.Op()]
- if n.Op() == ir.ODIV && n.Type().IsInteger() {
- tok = token.QUO_ASSIGN // integer division
- }
- return origConst(n, constant.BinaryOp(nl.Val(), tok, rval))
- }
-
- case ir.OOROR, ir.OANDAND:
- n := n.(*ir.LogicalExpr)
- nl, nr := n.X, n.Y
- if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
- return origConst(n, constant.BinaryOp(nl.Val(), tokenForOp[n.Op()], nr.Val()))
- }
-
- case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
- n := n.(*ir.BinaryExpr)
- nl, nr := n.X, n.Y
- if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
- return origBoolConst(n, constant.Compare(nl.Val(), tokenForOp[n.Op()], nr.Val()))
- }
-
- case ir.OLSH, ir.ORSH:
- n := n.(*ir.BinaryExpr)
- nl, nr := n.X, n.Y
- if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
- // shiftBound from go/types; "so we can express smallestFloat64"
- const shiftBound = 1023 - 1 + 52
- s, ok := constant.Uint64Val(nr.Val())
- if !ok || s > shiftBound {
- base.Errorf("invalid shift count %v", nr)
- n.SetType(nil)
- break
- }
- return origConst(n, constant.Shift(toint(nl.Val()), tokenForOp[n.Op()], uint(s)))
- }
-
- case ir.OCONV, ir.ORUNESTR:
- n := n.(*ir.ConvExpr)
- nl := n.X
- if ir.OKForConst[n.Type().Kind()] && nl.Op() == ir.OLITERAL {
- return origConst(n, convertVal(nl.Val(), n.Type(), true))
- }
-
- case ir.OCONVNOP:
- n := n.(*ir.ConvExpr)
- nl := n.X
- if ir.OKForConst[n.Type().Kind()] && nl.Op() == ir.OLITERAL {
- // set so n.Orig gets OCONV instead of OCONVNOP
- n.SetOp(ir.OCONV)
- return origConst(n, nl.Val())
- }
-
- case ir.OADDSTR:
- // Merge adjacent constants in the argument list.
- n := n.(*ir.AddStringExpr)
- s := n.List
- need := 0
- for i := 0; i < len(s); i++ {
- if i == 0 || !ir.IsConst(s[i-1], constant.String) || !ir.IsConst(s[i], constant.String) {
- // Can't merge s[i] into s[i-1]; need a slot in the list.
- need++
- }
- }
- if need == len(s) {
- return n
- }
- if need == 1 {
- var strs []string
- for _, c := range s {
- strs = append(strs, ir.StringVal(c))
- }
- return origConst(n, constant.MakeString(strings.Join(strs, "")))
- }
- newList := make([]ir.Node, 0, need)
- for i := 0; i < len(s); i++ {
- if ir.IsConst(s[i], constant.String) && i+1 < len(s) && ir.IsConst(s[i+1], constant.String) {
- // merge from i up to but not including i2
- var strs []string
- i2 := i
- for i2 < len(s) && ir.IsConst(s[i2], constant.String) {
- strs = append(strs, ir.StringVal(s[i2]))
- i2++
- }
-
- nl := ir.Copy(n).(*ir.AddStringExpr)
- nl.List.Set(s[i:i2])
- newList = append(newList, origConst(nl, constant.MakeString(strings.Join(strs, ""))))
- i = i2 - 1
- } else {
- newList = append(newList, s[i])
- }
- }
-
- nn := ir.Copy(n).(*ir.AddStringExpr)
- nn.List.Set(newList)
- return nn
-
- case ir.OCAP, ir.OLEN:
- n := n.(*ir.UnaryExpr)
- nl := n.X
- switch nl.Type().Kind() {
- case types.TSTRING:
- if ir.IsConst(nl, constant.String) {
- return origIntConst(n, int64(len(ir.StringVal(nl))))
- }
- case types.TARRAY:
- if !anyCallOrChan(nl) {
- return origIntConst(n, nl.Type().NumElem())
- }
- }
-
- case ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
- n := n.(*ir.UnaryExpr)
- return origIntConst(n, evalunsafe(n))
-
- case ir.OREAL:
- n := n.(*ir.UnaryExpr)
- nl := n.X
- if nl.Op() == ir.OLITERAL {
- return origConst(n, constant.Real(nl.Val()))
- }
-
- case ir.OIMAG:
- n := n.(*ir.UnaryExpr)
- nl := n.X
- if nl.Op() == ir.OLITERAL {
- return origConst(n, constant.Imag(nl.Val()))
- }
-
- case ir.OCOMPLEX:
- n := n.(*ir.BinaryExpr)
- nl, nr := n.X, n.Y
- if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
- return origConst(n, makeComplex(nl.Val(), nr.Val()))
- }
- }
-
- return n
-}
-
-func makeInt(i *big.Int) constant.Value {
- if i.IsInt64() {
- return constant.Make(i.Int64()) // workaround #42640 (Int64Val(Make(big.NewInt(10))) returns (10, false), not (10, true))
- }
- return constant.Make(i)
-}
-
-func makeFloat64(f float64) constant.Value {
- if math.IsInf(f, 0) {
- base.Fatalf("infinity is not a valid constant")
- }
- v := constant.MakeFloat64(f)
- v = constant.ToFloat(v) // workaround #42641 (MakeFloat64(0).Kind() returns Int, not Float)
- return v
-}
-
-func makeComplex(real, imag constant.Value) constant.Value {
- return constant.BinaryOp(constant.ToFloat(real), token.ADD, constant.MakeImag(constant.ToFloat(imag)))
-}
-
-func square(x constant.Value) constant.Value {
- return constant.BinaryOp(x, token.MUL, x)
-}
-
-// For matching historical "constant OP overflow" error messages.
-// TODO(mdempsky): Replace with error messages like go/types uses.
-var overflowNames = [...]string{
- ir.OADD: "addition",
- ir.OSUB: "subtraction",
- ir.OMUL: "multiplication",
- ir.OLSH: "shift",
- ir.OXOR: "bitwise XOR",
- ir.OBITNOT: "bitwise complement",
-}
-
-// origConst returns an OLITERAL with orig n and value v.
-func origConst(n ir.Node, v constant.Value) ir.Node {
- lno := ir.SetPos(n)
- v = convertVal(v, n.Type(), false)
- base.Pos = lno
-
- switch v.Kind() {
- case constant.Int:
- if constant.BitLen(v) <= ir.ConstPrec {
- break
- }
- fallthrough
- case constant.Unknown:
- what := overflowNames[n.Op()]
- if what == "" {
- base.Fatalf("unexpected overflow: %v", n.Op())
- }
- base.ErrorfAt(n.Pos(), "constant %v overflow", what)
- n.SetType(nil)
- return n
- }
-
- return ir.NewConstExpr(v, n)
-}
-
-func origBoolConst(n ir.Node, v bool) ir.Node {
- return origConst(n, constant.MakeBool(v))
-}
-
-func origIntConst(n ir.Node, v int64) ir.Node {
- return origConst(n, constant.MakeInt64(v))
-}
-
-// defaultlit on both nodes simultaneously;
-// if they're both ideal going in they better
-// get the same type going out.
-// force means must assign concrete (non-ideal) type.
-// The results of defaultlit2 MUST be assigned back to l and r, e.g.
-// n.Left, n.Right = defaultlit2(n.Left, n.Right, force)
-func defaultlit2(l ir.Node, r ir.Node, force bool) (ir.Node, ir.Node) {
- if l.Type() == nil || r.Type() == nil {
- return l, r
- }
- if !l.Type().IsUntyped() {
- r = convlit(r, l.Type())
- return l, r
- }
-
- if !r.Type().IsUntyped() {
- l = convlit(l, r.Type())
- return l, r
- }
-
- if !force {
- return l, r
- }
-
- // Can't mix bool with non-bool, string with non-string, or nil with anything (untyped).
- if l.Type().IsBoolean() != r.Type().IsBoolean() {
- return l, r
- }
- if l.Type().IsString() != r.Type().IsString() {
- return l, r
- }
- if ir.IsNil(l) || ir.IsNil(r) {
- return l, r
- }
-
- t := defaultType(mixUntyped(l.Type(), r.Type()))
- l = convlit(l, t)
- r = convlit(r, t)
- return l, r
-}
-
-func mixUntyped(t1, t2 *types.Type) *types.Type {
- if t1 == t2 {
- return t1
- }
-
- rank := func(t *types.Type) int {
- switch t {
- case types.UntypedInt:
- return 0
- case types.UntypedRune:
- return 1
- case types.UntypedFloat:
- return 2
- case types.UntypedComplex:
- return 3
- }
- base.Fatalf("bad type %v", t)
- panic("unreachable")
- }
-
- if rank(t2) > rank(t1) {
- return t2
- }
- return t1
-}
-
-func defaultType(t *types.Type) *types.Type {
- if !t.IsUntyped() || t.Kind() == types.TNIL {
- return t
- }
-
- switch t {
- case types.UntypedBool:
- return types.Types[types.TBOOL]
- case types.UntypedString:
- return types.Types[types.TSTRING]
- case types.UntypedInt:
- return types.Types[types.TINT]
- case types.UntypedRune:
- return types.RuneType
- case types.UntypedFloat:
- return types.Types[types.TFLOAT64]
- case types.UntypedComplex:
- return types.Types[types.TCOMPLEX128]
- }
-
- base.Fatalf("bad type %v", t)
- return nil
-}
-
-// indexconst checks if Node n contains a constant expression
-// representable as a non-negative int and returns its value.
-// If n is not a constant expression, not representable as an
-// integer, or negative, it returns -1. If n is too large, it
-// returns -2.
-func indexconst(n ir.Node) int64 {
- if n.Op() != ir.OLITERAL {
- return -1
- }
- if !n.Type().IsInteger() && n.Type().Kind() != types.TIDEAL {
- return -1
- }
-
- v := toint(n.Val())
- if v.Kind() != constant.Int || constant.Sign(v) < 0 {
- return -1
- }
- if ir.ConstOverflow(v, types.Types[types.TINT]) {
- return -2
- }
- return ir.IntVal(types.Types[types.TINT], v)
-}
-
-// anyCallOrChan reports whether n contains any calls or channel operations.
-func anyCallOrChan(n ir.Node) bool {
- return ir.Any(n, func(n ir.Node) bool {
- switch n.Op() {
- case ir.OAPPEND,
- ir.OCALL,
- ir.OCALLFUNC,
- ir.OCALLINTER,
- ir.OCALLMETH,
- ir.OCAP,
- ir.OCLOSE,
- ir.OCOMPLEX,
- ir.OCOPY,
- ir.ODELETE,
- ir.OIMAG,
- ir.OLEN,
- ir.OMAKE,
- ir.ONEW,
- ir.OPANIC,
- ir.OPRINT,
- ir.OPRINTN,
- ir.OREAL,
- ir.ORECOVER,
- ir.ORECV:
- return true
- }
- return false
- })
-}
-
-// A constSet represents a set of Go constant expressions.
-type constSet struct {
- m map[constSetKey]src.XPos
-}
-
-type constSetKey struct {
- typ *types.Type
- val interface{}
-}
-
-// add adds constant expression n to s. If a constant expression of
-// equal value and identical type has already been added, then add
-// reports an error about the duplicate value.
-//
-// pos provides position information for where expression n occurred
-// (in case n does not have its own position information). what and
-// where are used in the error message.
-//
-// n must not be an untyped constant.
-func (s *constSet) add(pos src.XPos, n ir.Node, what, where string) {
- if conv := n; conv.Op() == ir.OCONVIFACE {
- conv := conv.(*ir.ConvExpr)
- if conv.Implicit() {
- n = conv.X
- }
- }
-
- if !ir.IsConstNode(n) {
- return
- }
- if n.Type().IsUntyped() {
- base.Fatalf("%v is untyped", n)
- }
-
- // Consts are only duplicates if they have the same value and
- // identical types.
- //
- // In general, we have to use types.Identical to test type
- // identity, because == gives false negatives for anonymous
- // types and the byte/uint8 and rune/int32 builtin type
- // aliases. However, this is not a problem here, because
- // constant expressions are always untyped or have a named
- // type, and we explicitly handle the builtin type aliases
- // below.
- //
- // This approach may need to be revisited though if we fix
- // #21866 by treating all type aliases like byte/uint8 and
- // rune/int32.
-
- typ := n.Type()
- switch typ {
- case types.ByteType:
- typ = types.Types[types.TUINT8]
- case types.RuneType:
- typ = types.Types[types.TINT32]
- }
- k := constSetKey{typ, ir.ConstValue(n)}
-
- if ir.HasUniquePos(n) {
- pos = n.Pos()
- }
-
- if s.m == nil {
- s.m = make(map[constSetKey]src.XPos)
- }
-
- if prevPos, isDup := s.m[k]; isDup {
- base.ErrorfAt(pos, "duplicate %s %s in %s\n\tprevious %s at %v",
- what, nodeAndVal(n), where,
- what, base.FmtPos(prevPos))
- } else {
- s.m[k] = pos
- }
-}
-
-// nodeAndVal reports both an expression and its constant value, if
-// the latter is non-obvious.
-//
-// TODO(mdempsky): This could probably be a fmt.go flag.
-func nodeAndVal(n ir.Node) string {
- show := fmt.Sprint(n)
- val := ir.ConstValue(n)
- if s := fmt.Sprintf("%#v", val); show != s {
- show += " (value " + s + ")"
- }
- return show
-}
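The deleted const.go is a thin layer over the standard go/constant package (BinaryOp, Compare, Shift, and the token.QUO_ASSIGN convention for integer division, as used in evalConst above). A minimal standalone sketch of those calls, independent of the compiler:

package main

import (
    "fmt"
    "go/constant"
    "go/token"
)

func main() {
    a := constant.MakeInt64(7)
    b := constant.MakeInt64(3)

    sum := constant.BinaryOp(a, token.ADD, b)        // 10
    quo := constant.BinaryOp(a, token.QUO_ASSIGN, b) // 2: QUO_ASSIGN requests integer division
    less := constant.Compare(a, token.LSS, b)        // false
    shl := constant.Shift(a, token.SHL, 2)           // 28

    fmt.Println(sum, quo, less, shl)
}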
diff --git a/src/cmd/compile/internal/gc/dcl.go b/src/cmd/compile/internal/gc/dcl.go
index 1189d0ec12..e53bba44ad 100644
--- a/src/cmd/compile/internal/gc/dcl.go
+++ b/src/cmd/compile/internal/gc/dcl.go
@@ -8,11 +8,11 @@ import (
"bytes"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
"fmt"
- "strings"
)
func EnableNoWriteBarrierRecCheck() {
@@ -28,154 +28,6 @@ func NoWriteBarrierRecCheck() {
var nowritebarrierrecCheck *nowritebarrierrecChecker
-// redeclare emits a diagnostic about symbol s being redeclared at pos.
-func redeclare(pos src.XPos, s *types.Sym, where string) {
- if !s.Lastlineno.IsKnown() {
- pkgName := dotImportRefs[s.Def.(*ir.Ident)]
- base.ErrorfAt(pos, "%v redeclared %s\n"+
- "\t%v: previous declaration during import %q", s, where, base.FmtPos(pkgName.Pos()), pkgName.Pkg.Path)
- } else {
- prevPos := s.Lastlineno
-
- // When an import and a declaration collide in separate files,
- // present the import as the "redeclared", because the declaration
- // is visible where the import is, but not vice versa.
- // See issue 4510.
- if s.Def == nil {
- pos, prevPos = prevPos, pos
- }
-
- base.ErrorfAt(pos, "%v redeclared %s\n"+
- "\t%v: previous declaration", s, where, base.FmtPos(prevPos))
- }
-}
-
-var vargen int
-
-// declare individual names - var, typ, const
-
-var declare_typegen int
-
-// declare records that Node n declares symbol n.Sym in the specified
-// declaration context.
-func declare(n *ir.Name, ctxt ir.Class) {
- if ir.IsBlank(n) {
- return
- }
-
- s := n.Sym()
-
- // kludgy: typecheckok means we're past parsing. E.g., genwrapper may declare out-of-package names later.
- if !inimport && !typecheckok && s.Pkg != types.LocalPkg {
- base.ErrorfAt(n.Pos(), "cannot declare name %v", s)
- }
-
- gen := 0
- if ctxt == ir.PEXTERN {
- if s.Name == "init" {
- base.ErrorfAt(n.Pos(), "cannot declare init - must be func")
- }
- if s.Name == "main" && s.Pkg.Name == "main" {
- base.ErrorfAt(n.Pos(), "cannot declare main - must be func")
- }
- Target.Externs = append(Target.Externs, n)
- } else {
- if ir.CurFunc == nil && ctxt == ir.PAUTO {
- base.Pos = n.Pos()
- base.Fatalf("automatic outside function")
- }
- if ir.CurFunc != nil && ctxt != ir.PFUNC && n.Op() == ir.ONAME {
- ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
- }
- if n.Op() == ir.OTYPE {
- declare_typegen++
- gen = declare_typegen
- } else if n.Op() == ir.ONAME && ctxt == ir.PAUTO && !strings.Contains(s.Name, "·") {
- vargen++
- gen = vargen
- }
- types.Pushdcl(s)
- n.Curfn = ir.CurFunc
- }
-
- if ctxt == ir.PAUTO {
- n.SetFrameOffset(0)
- }
-
- if s.Block == types.Block {
- // functype will print errors about duplicate function arguments.
- // Don't repeat the error here.
- if ctxt != ir.PPARAM && ctxt != ir.PPARAMOUT {
- redeclare(n.Pos(), s, "in this block")
- }
- }
-
- s.Block = types.Block
- s.Lastlineno = base.Pos
- s.Def = n
- n.Vargen = int32(gen)
- n.Class_ = ctxt
- if ctxt == ir.PFUNC {
- n.Sym().SetFunc(true)
- }
-
- autoexport(n, ctxt)
-}
-
-// declare variables from grammar
-// new_name_list (type | [type] = expr_list)
-func variter(vl []*ir.Name, t ir.Ntype, el []ir.Node) []ir.Node {
- var init []ir.Node
- doexpr := len(el) > 0
-
- if len(el) == 1 && len(vl) > 1 {
- e := el[0]
- as2 := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- as2.Rhs = []ir.Node{e}
- for _, v := range vl {
- as2.Lhs.Append(v)
- declare(v, dclcontext)
- v.Ntype = t
- v.Defn = as2
- if ir.CurFunc != nil {
- init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v))
- }
- }
-
- return append(init, as2)
- }
-
- for i, v := range vl {
- var e ir.Node
- if doexpr {
- if i >= len(el) {
- base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el))
- break
- }
- e = el[i]
- }
-
- declare(v, dclcontext)
- v.Ntype = t
-
- if e != nil || ir.CurFunc != nil || ir.IsBlank(v) {
- if ir.CurFunc != nil {
- init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v))
- }
- as := ir.NewAssignStmt(base.Pos, v, e)
- init = append(init, as)
- if e != nil {
- v.Defn = as
- }
- }
- }
-
- if len(el) > len(vl) {
- base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el))
- }
- return init
-}
-
// oldname returns the Node that declares symbol s in the current scope.
// If no such Node currently exists, an ONONAME Node is returned instead.
// Automatically creates a new closure variable if the referenced symbol was
@@ -204,7 +56,7 @@ func oldname(s *types.Sym) ir.Node {
c := n.Name().Innermost
if c == nil || c.Curfn != ir.CurFunc {
// Do not have a closure var for the active closure yet; make one.
- c = NewName(s)
+ c = typecheck.NewName(s)
c.Class_ = ir.PAUTOHEAP
c.SetIsClosureVar(true)
c.SetIsDDD(n.IsDDD())
@@ -236,419 +88,10 @@ func importName(sym *types.Sym) ir.Node {
return n
}
-// := declarations
-func colasname(n ir.Node) bool {
- switch n.Op() {
- case ir.ONAME,
- ir.ONONAME,
- ir.OPACK,
- ir.OTYPE,
- ir.OLITERAL:
- return n.Sym() != nil
- }
-
- return false
-}
-
-func colasdefn(left []ir.Node, defn ir.Node) {
- for _, n := range left {
- if n.Sym() != nil {
- n.Sym().SetUniq(true)
- }
- }
-
- var nnew, nerr int
- for i, n := range left {
- if ir.IsBlank(n) {
- continue
- }
- if !colasname(n) {
- base.ErrorfAt(defn.Pos(), "non-name %v on left side of :=", n)
- nerr++
- continue
- }
-
- if !n.Sym().Uniq() {
- base.ErrorfAt(defn.Pos(), "%v repeated on left side of :=", n.Sym())
- n.SetDiag(true)
- nerr++
- continue
- }
-
- n.Sym().SetUniq(false)
- if n.Sym().Block == types.Block {
- continue
- }
-
- nnew++
- n := NewName(n.Sym())
- declare(n, dclcontext)
- n.Defn = defn
- defn.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n))
- left[i] = n
- }
-
- if nnew == 0 && nerr == 0 {
- base.ErrorfAt(defn.Pos(), "no new variables on left side of :=")
- }
-}
-
-// declare the function proper
-// and declare the arguments.
-// called in extern-declaration context
-// returns in auto-declaration context.
-func funchdr(fn *ir.Func) {
- // change the declaration context from extern to auto
- funcStack = append(funcStack, funcStackEnt{ir.CurFunc, dclcontext})
- ir.CurFunc = fn
- dclcontext = ir.PAUTO
-
- types.Markdcl()
-
- if fn.Nname.Ntype != nil {
- funcargs(fn.Nname.Ntype.(*ir.FuncType))
- } else {
- funcargs2(fn.Type())
- }
-}
-
-func funcargs(nt *ir.FuncType) {
- if nt.Op() != ir.OTFUNC {
- base.Fatalf("funcargs %v", nt.Op())
- }
-
- // re-start the variable generation number
- // we want to use small numbers for the return variables,
- // so let them have the chunk starting at 1.
- //
- // TODO(mdempsky): This is ugly, and only necessary because
- // esc.go uses Vargen to figure out result parameters' index
- // within the result tuple.
- vargen = len(nt.Results)
-
- // declare the receiver and in arguments.
- if nt.Recv != nil {
- funcarg(nt.Recv, ir.PPARAM)
- }
- for _, n := range nt.Params {
- funcarg(n, ir.PPARAM)
- }
-
- oldvargen := vargen
- vargen = 0
-
- // declare the out arguments.
- gen := len(nt.Params)
- for _, n := range nt.Results {
- if n.Sym == nil {
- // Name so that escape analysis can track it. ~r stands for 'result'.
- n.Sym = lookupN("~r", gen)
- gen++
- }
- if n.Sym.IsBlank() {
- // Give it a name so we can assign to it during return. ~b stands for 'blank'.
- // The name must be different from ~r above because if you have
- // func f() (_ int)
- // func g() int
- // f is allowed to use a plain 'return' with no arguments, while g is not.
- // So the two cases must be distinguished.
- n.Sym = lookupN("~b", gen)
- gen++
- }
-
- funcarg(n, ir.PPARAMOUT)
- }
-
- vargen = oldvargen
-}
-
-func funcarg(n *ir.Field, ctxt ir.Class) {
- if n.Sym == nil {
- return
- }
-
- name := ir.NewNameAt(n.Pos, n.Sym)
- n.Decl = name
- name.Ntype = n.Ntype
- name.SetIsDDD(n.IsDDD)
- declare(name, ctxt)
-
- vargen++
- n.Decl.Vargen = int32(vargen)
-}
-
-// Same as funcargs, except run over an already constructed TFUNC.
-// This happens during import, where the hidden_fndcl rule has
-// used functype directly to parse the function's type.
-func funcargs2(t *types.Type) {
- if t.Kind() != types.TFUNC {
- base.Fatalf("funcargs2 %v", t)
- }
-
- for _, f := range t.Recvs().Fields().Slice() {
- funcarg2(f, ir.PPARAM)
- }
- for _, f := range t.Params().Fields().Slice() {
- funcarg2(f, ir.PPARAM)
- }
- for _, f := range t.Results().Fields().Slice() {
- funcarg2(f, ir.PPARAMOUT)
- }
-}
-
-func funcarg2(f *types.Field, ctxt ir.Class) {
- if f.Sym == nil {
- return
- }
- n := ir.NewNameAt(f.Pos, f.Sym)
- f.Nname = n
- n.SetType(f.Type)
- n.SetIsDDD(f.IsDDD())
- declare(n, ctxt)
-}
-
-var funcStack []funcStackEnt // stack of previous values of Curfn/dclcontext
-
-type funcStackEnt struct {
- curfn *ir.Func
- dclcontext ir.Class
-}
-
-func CheckFuncStack() {
- if len(funcStack) != 0 {
- base.Fatalf("funcStack is non-empty: %v", len(funcStack))
- }
-}
-
-// finish the body.
-// called in auto-declaration context.
-// returns in extern-declaration context.
-func funcbody() {
- // change the declaration context from auto to previous context
- types.Popdcl()
- var e funcStackEnt
- funcStack, e = funcStack[:len(funcStack)-1], funcStack[len(funcStack)-1]
- ir.CurFunc, dclcontext = e.curfn, e.dclcontext
-}
-
-// structs, functions, and methods.
-// they don't belong here, but where do they belong?
-func checkembeddedtype(t *types.Type) {
- if t == nil {
- return
- }
-
- if t.Sym() == nil && t.IsPtr() {
- t = t.Elem()
- if t.IsInterface() {
- base.Errorf("embedded type cannot be a pointer to interface")
- }
- }
-
- if t.IsPtr() || t.IsUnsafePtr() {
- base.Errorf("embedded type cannot be a pointer")
- } else if t.Kind() == types.TFORW && !t.ForwardType().Embedlineno.IsKnown() {
- t.ForwardType().Embedlineno = base.Pos
- }
-}
-
-// checkdupfields emits errors for duplicately named fields or methods in
-// a list of struct or interface types.
-func checkdupfields(what string, fss ...[]*types.Field) {
- seen := make(map[*types.Sym]bool)
- for _, fs := range fss {
- for _, f := range fs {
- if f.Sym == nil || f.Sym.IsBlank() {
- continue
- }
- if seen[f.Sym] {
- base.ErrorfAt(f.Pos, "duplicate %s %s", what, f.Sym.Name)
- continue
- }
- seen[f.Sym] = true
- }
- }
-}
-
-// convert a parsed id/type list into
-// a type for struct/interface/arglist
-func tostruct(l []*ir.Field) *types.Type {
- lno := base.Pos
-
- fields := make([]*types.Field, len(l))
- for i, n := range l {
- base.Pos = n.Pos
-
- if n.Ntype != nil {
- n.Type = typecheckNtype(n.Ntype).Type()
- n.Ntype = nil
- }
- f := types.NewField(n.Pos, n.Sym, n.Type)
- if n.Embedded {
- checkembeddedtype(n.Type)
- f.Embedded = 1
- }
- f.Note = n.Note
- fields[i] = f
- }
- checkdupfields("field", fields)
-
- base.Pos = lno
- return types.NewStruct(types.LocalPkg, fields)
-}
-
-func tointerface(nmethods []*ir.Field) *types.Type {
- if len(nmethods) == 0 {
- return types.Types[types.TINTER]
- }
-
- lno := base.Pos
-
- methods := make([]*types.Field, len(nmethods))
- for i, n := range nmethods {
- base.Pos = n.Pos
- if n.Ntype != nil {
- n.Type = typecheckNtype(n.Ntype).Type()
- n.Ntype = nil
- }
- methods[i] = types.NewField(n.Pos, n.Sym, n.Type)
- }
-
- base.Pos = lno
- return types.NewInterface(types.LocalPkg, methods)
-}
-
func fakeRecv() *ir.Field {
return ir.NewField(base.Pos, nil, nil, types.FakeRecvType())
}
-func fakeRecvField() *types.Field {
- return types.NewField(src.NoXPos, nil, types.FakeRecvType())
-}
-
-// turn a parsed function declaration into a type
-func functype(nrecv *ir.Field, nparams, nresults []*ir.Field) *types.Type {
- funarg := func(n *ir.Field) *types.Field {
- lno := base.Pos
- base.Pos = n.Pos
-
- if n.Ntype != nil {
- n.Type = typecheckNtype(n.Ntype).Type()
- n.Ntype = nil
- }
-
- f := types.NewField(n.Pos, n.Sym, n.Type)
- f.SetIsDDD(n.IsDDD)
- if n.Decl != nil {
- n.Decl.SetType(f.Type)
- f.Nname = n.Decl
- }
-
- base.Pos = lno
- return f
- }
- funargs := func(nn []*ir.Field) []*types.Field {
- res := make([]*types.Field, len(nn))
- for i, n := range nn {
- res[i] = funarg(n)
- }
- return res
- }
-
- var recv *types.Field
- if nrecv != nil {
- recv = funarg(nrecv)
- }
-
- t := types.NewSignature(types.LocalPkg, recv, funargs(nparams), funargs(nresults))
- checkdupfields("argument", t.Recvs().FieldSlice(), t.Params().FieldSlice(), t.Results().FieldSlice())
- return t
-}
-
-// Add a method, declared as a function.
-// - msym is the method symbol
-// - t is function type (with receiver)
-// Returns a pointer to the existing or added Field; or nil if there's an error.
-func addmethod(n *ir.Func, msym *types.Sym, t *types.Type, local, nointerface bool) *types.Field {
- if msym == nil {
- base.Fatalf("no method symbol")
- }
-
- // get parent type sym
- rf := t.Recv() // ptr to this structure
- if rf == nil {
- base.Errorf("missing receiver")
- return nil
- }
-
- mt := types.ReceiverBaseType(rf.Type)
- if mt == nil || mt.Sym() == nil {
- pa := rf.Type
- t := pa
- if t != nil && t.IsPtr() {
- if t.Sym() != nil {
- base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t)
- return nil
- }
- t = t.Elem()
- }
-
- switch {
- case t == nil || t.Broke():
- // rely on typecheck having complained before
- case t.Sym() == nil:
- base.Errorf("invalid receiver type %v (%v is not a defined type)", pa, t)
- case t.IsPtr():
- base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t)
- case t.IsInterface():
- base.Errorf("invalid receiver type %v (%v is an interface type)", pa, t)
- default:
- // Should have picked off all the reasons above,
- // but just in case, fall back to generic error.
- base.Errorf("invalid receiver type %v (%L / %L)", pa, pa, t)
- }
- return nil
- }
-
- if local && mt.Sym().Pkg != types.LocalPkg {
- base.Errorf("cannot define new methods on non-local type %v", mt)
- return nil
- }
-
- if msym.IsBlank() {
- return nil
- }
-
- if mt.IsStruct() {
- for _, f := range mt.Fields().Slice() {
- if f.Sym == msym {
- base.Errorf("type %v has both field and method named %v", mt, msym)
- f.SetBroke(true)
- return nil
- }
- }
- }
-
- for _, f := range mt.Methods().Slice() {
- if msym.Name != f.Sym.Name {
- continue
- }
- // types.Identical only checks that incoming and result parameters match,
- // so explicitly check that the receiver parameters match too.
- if !types.Identical(t, f.Type) || !types.Identical(t.Recv().Type, f.Type.Recv().Type) {
- base.Errorf("method redeclared: %v.%v\n\t%v\n\t%v", mt, msym, f.Type, t)
- }
- return f
- }
-
- f := types.NewField(base.Pos, msym, t)
- f.Nname = n.Nname
- f.SetNointerface(nointerface)
-
- mt.Methods().Append(f)
- return f
-}
-
// funcsym returns s·f.
func funcsym(s *types.Sym) *types.Sym {
// funcsymsmu here serves to protect not just mutations of funcsyms (below),
@@ -700,21 +143,6 @@ func makefuncsym(s *types.Sym) {
}
}
-func dclfunc(sym *types.Sym, tfn ir.Ntype) *ir.Func {
- if tfn.Op() != ir.OTFUNC {
- base.Fatalf("expected OTFUNC node, got %v", tfn)
- }
-
- fn := ir.NewFunc(base.Pos)
- fn.Nname = ir.NewFuncNameAt(base.Pos, sym, fn)
- fn.Nname.Defn = fn
- fn.Nname.Ntype = tfn
- ir.MarkFunc(fn.Nname)
- funchdr(fn)
- fn.Nname.Ntype = typecheckNtype(fn.Nname.Ntype)
- return fn
-}
-
type nowritebarrierrecChecker struct {
// extraCalls contains extra function calls that may not be
// visible during later analysis. It maps from the ODCLFUNC of
@@ -742,7 +170,7 @@ func newNowritebarrierrecChecker() *nowritebarrierrecChecker {
// important to handle it for this check, so we model it
// directly. This has to happen before transformclosure since
// it's a lot harder to work out the argument after.
- for _, n := range Target.Decls {
+ for _, n := range typecheck.Target.Decls {
if n.Op() != ir.ODCLFUNC {
continue
}
@@ -819,7 +247,7 @@ func (c *nowritebarrierrecChecker) check() {
// q is the queue of ODCLFUNC Nodes to visit in BFS order.
var q ir.NameQueue
- for _, n := range Target.Decls {
+ for _, n := range typecheck.Target.Decls {
if n.Op() != ir.ODCLFUNC {
continue
}
diff --git a/src/cmd/compile/internal/gc/embed.go b/src/cmd/compile/internal/gc/embed.go
index 70c5c2a25a..bcfec3cad3 100644
--- a/src/cmd/compile/internal/gc/embed.go
+++ b/src/cmd/compile/internal/gc/embed.go
@@ -8,6 +8,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
@@ -61,13 +62,13 @@ func varEmbed(p *noder, names []*ir.Name, typ ir.Ntype, exprs []ir.Node, embeds
p.errorAt(pos, "go:embed cannot apply to var without type")
return exprs
}
- if dclcontext != ir.PEXTERN {
+ if typecheck.DeclContext != ir.PEXTERN {
p.errorAt(pos, "go:embed cannot apply to var inside func")
return exprs
}
v := names[0]
- Target.Embeds = append(Target.Embeds, v)
+ typecheck.Target.Embeds = append(typecheck.Target.Embeds, v)
v.Embed = new([]ir.Embed)
for _, e := range embeds {
*v.Embed = append(*v.Embed, ir.Embed{Pos: p.makeXPos(e.Pos), Patterns: e.Patterns})
@@ -184,7 +185,7 @@ func embedFileLess(x, y string) bool {
}
func dumpembeds() {
- for _, v := range Target.Embeds {
+ for _, v := range typecheck.Target.Embeds {
initEmbed(v)
}
}
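From the user's point of view, the varEmbed checks above enforce that //go:embed may only annotate a package-level var with an explicit type. A minimal user-side example, assuming a hello.txt file exists beside the source:

package main

import (
    _ "embed"
    "fmt"
)

// go:embed is accepted only on a package-level var with an explicit type
// (string, []byte, or embed.FS); hello.txt is assumed to exist.
//
//go:embed hello.txt
var greeting string

func main() {
    fmt.Println(greeting)
}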
diff --git a/src/cmd/compile/internal/gc/escape.go b/src/cmd/compile/internal/gc/escape.go
index 6843d8b00e..187313695f 100644
--- a/src/cmd/compile/internal/gc/escape.go
+++ b/src/cmd/compile/internal/gc/escape.go
@@ -8,6 +8,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
@@ -870,7 +871,7 @@ func (e *Escape) call(ks []EscHole, call, where ir.Node) {
case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
call := call.(*ir.CallExpr)
- fixVariadicCall(call)
+ typecheck.FixVariadicCall(call)
// Pick out the function callee, if statically known.
var fn *ir.Name
@@ -1877,10 +1878,10 @@ func heapAllocReason(n ir.Node) string {
return "too large for stack"
}
- if n.Op() == ir.OCLOSURE && closureType(n.(*ir.ClosureExpr)).Size() >= ir.MaxImplicitStackVarSize {
+ if n.Op() == ir.OCLOSURE && typecheck.ClosureType(n.(*ir.ClosureExpr)).Size() >= ir.MaxImplicitStackVarSize {
return "too large for stack"
}
- if n.Op() == ir.OCALLPART && partialCallType(n.(*ir.CallPartExpr)).Size() >= ir.MaxImplicitStackVarSize {
+ if n.Op() == ir.OCALLPART && typecheck.PartialCallType(n.(*ir.CallPartExpr)).Size() >= ir.MaxImplicitStackVarSize {
return "too large for stack"
}
@@ -1992,8 +1993,8 @@ func moveToHeap(n *ir.Name) {
// Allocate a local stack variable to hold the pointer to the heap copy.
// temp will add it to the function declaration list automatically.
- heapaddr := temp(types.NewPtr(n.Type()))
- heapaddr.SetSym(lookup("&" + n.Sym().Name))
+ heapaddr := typecheck.Temp(types.NewPtr(n.Type()))
+ heapaddr.SetSym(typecheck.Lookup("&" + n.Sym().Name))
heapaddr.SetPos(n.Pos())
// Unset AutoTemp to persist the &foo variable name through SSA to
@@ -2013,7 +2014,7 @@ func moveToHeap(n *ir.Name) {
// Preserve a copy so we can still write code referring to the original,
// and substitute that copy into the function declaration list
// so that analyses of the local (on-stack) variables use it.
- stackcopy := NewName(n.Sym())
+ stackcopy := typecheck.NewName(n.Sym())
stackcopy.SetType(n.Type())
stackcopy.SetFrameOffset(n.FrameOffset())
stackcopy.Class_ = n.Class_
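
moveToHeap replaces the stack slot with a pointer temp whose symbol is the original name prefixed with "&", plus a stack copy for analyses that still refer to the original. A tiny program whose local forces that rewrite is sketched below; building it with -gcflags=-m typically reports something like "moved to heap: x" (the leak function is made up, and the exact diagnostic text can vary by Go version).

package main

import "fmt"

// leak returns the address of a local, so escape analysis has to move x to
// the heap rather than leave it in leak's stack frame.
func leak() *int {
	x := 42
	return &x
}

func main() {
	fmt.Println(*leak()) // 42
}
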
diff --git a/src/cmd/compile/internal/gc/export.go b/src/cmd/compile/internal/gc/export.go
index 2855f815be..a414962431 100644
--- a/src/cmd/compile/internal/gc/export.go
+++ b/src/cmd/compile/internal/gc/export.go
@@ -7,9 +7,9 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/bio"
- "cmd/internal/src"
"fmt"
"go/constant"
)
@@ -21,54 +21,16 @@ func exportf(bout *bio.Writer, format string, args ...interface{}) {
}
}
-// exportsym marks n for export (or reexport).
-func exportsym(n *ir.Name) {
- if n.Sym().OnExportList() {
- return
- }
- n.Sym().SetOnExportList(true)
-
- if base.Flag.E != 0 {
- fmt.Printf("export symbol %v\n", n.Sym())
- }
-
- Target.Exports = append(Target.Exports, n)
-}
-
-func initname(s string) bool {
- return s == "init"
-}
-
-func autoexport(n *ir.Name, ctxt ir.Class) {
- if n.Sym().Pkg != types.LocalPkg {
- return
- }
- if (ctxt != ir.PEXTERN && ctxt != ir.PFUNC) || dclcontext != ir.PEXTERN {
- return
- }
- if n.Type() != nil && n.Type().IsKind(types.TFUNC) && ir.IsMethod(n) {
- return
- }
-
- if types.IsExported(n.Sym().Name) || initname(n.Sym().Name) {
- exportsym(n)
- }
- if base.Flag.AsmHdr != "" && !n.Sym().Asm() {
- n.Sym().SetAsm(true)
- Target.Asms = append(Target.Asms, n)
- }
-}
-
func dumpexport(bout *bio.Writer) {
p := &exporter{marked: make(map[*types.Type]bool)}
- for _, n := range Target.Exports {
+ for _, n := range typecheck.Target.Exports {
p.markObject(n)
}
// The linker also looks for the $$ marker - use char after $$ to distinguish format.
exportf(bout, "\n$$B\n") // indicate binary export format
off := bout.Offset()
- iexport(bout.Writer)
+ typecheck.WriteExports(bout.Writer)
size := bout.Offset() - off
exportf(bout, "\n$$\n")
@@ -77,78 +39,13 @@ func dumpexport(bout *bio.Writer) {
}
}
-func importsym(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class) *ir.Name {
- if n := s.PkgDef(); n != nil {
- base.Fatalf("importsym of symbol that already exists: %v", n)
- }
-
- n := ir.NewDeclNameAt(pos, op, s)
- n.Class_ = ctxt // TODO(mdempsky): Move this into NewDeclNameAt too?
- s.SetPkgDef(n)
- s.Importdef = ipkg
- return n
-}
-
-// importtype returns the named type declared by symbol s.
-// If no such type has been declared yet, a forward declaration is returned.
-// ipkg is the package being imported
-func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *ir.Name {
- n := importsym(ipkg, pos, s, ir.OTYPE, ir.PEXTERN)
- n.SetType(types.NewNamed(n))
- return n
-}
-
-// importobj declares symbol s as an imported object representable by op.
-// ipkg is the package being imported
-func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class, t *types.Type) *ir.Name {
- n := importsym(ipkg, pos, s, op, ctxt)
- n.SetType(t)
- if ctxt == ir.PFUNC {
- n.Sym().SetFunc(true)
- }
- return n
-}
-
-// importconst declares symbol s as an imported constant with type t and value val.
-// ipkg is the package being imported
-func importconst(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type, val constant.Value) *ir.Name {
- n := importobj(ipkg, pos, s, ir.OLITERAL, ir.PEXTERN, t)
- n.SetVal(val)
- return n
-}
-
-// importfunc declares symbol s as an imported function with type t.
-// ipkg is the package being imported
-func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
- n := importobj(ipkg, pos, s, ir.ONAME, ir.PFUNC, t)
-
- fn := ir.NewFunc(pos)
- fn.SetType(t)
- n.SetFunc(fn)
- fn.Nname = n
-
- return n
-}
-
-// importvar declares symbol s as an imported variable with type t.
-// ipkg is the package being imported
-func importvar(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
- return importobj(ipkg, pos, s, ir.ONAME, ir.PEXTERN, t)
-}
-
-// importalias declares symbol s as an imported type alias with type t.
-// ipkg is the package being imported
-func importalias(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
- return importobj(ipkg, pos, s, ir.OTYPE, ir.PEXTERN, t)
-}
-
func dumpasmhdr() {
b, err := bio.Create(base.Flag.AsmHdr)
if err != nil {
base.Fatalf("%v", err)
}
fmt.Fprintf(b, "// generated by compile -asmhdr from package %s\n\n", types.LocalPkg.Name)
- for _, n := range Target.Asms {
+ for _, n := range typecheck.Target.Asms {
if n.Sym().IsBlank() {
continue
}
@@ -176,3 +73,83 @@ func dumpasmhdr() {
b.Close()
}
+
+type exporter struct {
+ marked map[*types.Type]bool // types already seen by markType
+}
+
+// markObject visits a reachable object.
+func (p *exporter) markObject(n ir.Node) {
+ if n.Op() == ir.ONAME {
+ n := n.(*ir.Name)
+ if n.Class_ == ir.PFUNC {
+ inlFlood(n, typecheck.Export)
+ }
+ }
+
+ p.markType(n.Type())
+}
+
+// markType recursively visits types reachable from t to identify
+// functions whose inline bodies may be needed.
+func (p *exporter) markType(t *types.Type) {
+ if p.marked[t] {
+ return
+ }
+ p.marked[t] = true
+
+ // If this is a named type, mark all of its associated
+ // methods. Skip interface types because t.Methods contains
+ // only their unexpanded method set (i.e., exclusive of
+ // interface embeddings), and the switch statement below
+ // handles their full method set.
+ if t.Sym() != nil && t.Kind() != types.TINTER {
+ for _, m := range t.Methods().Slice() {
+ if types.IsExported(m.Sym.Name) {
+ p.markObject(ir.AsNode(m.Nname))
+ }
+ }
+ }
+
+ // Recursively mark any types that can be produced given a
+ // value of type t: dereferencing a pointer; indexing or
+ // iterating over an array, slice, or map; receiving from a
+ // channel; accessing a struct field or interface method; or
+ // calling a function.
+ //
+ // Notably, we don't mark function parameter types, because
+ // the user already needs some way to construct values of
+ // those types.
+ switch t.Kind() {
+ case types.TPTR, types.TARRAY, types.TSLICE:
+ p.markType(t.Elem())
+
+ case types.TCHAN:
+ if t.ChanDir().CanRecv() {
+ p.markType(t.Elem())
+ }
+
+ case types.TMAP:
+ p.markType(t.Key())
+ p.markType(t.Elem())
+
+ case types.TSTRUCT:
+ for _, f := range t.FieldSlice() {
+ if types.IsExported(f.Sym.Name) || f.Embedded != 0 {
+ p.markType(f.Type)
+ }
+ }
+
+ case types.TFUNC:
+ for _, f := range t.Results().FieldSlice() {
+ p.markType(f.Type)
+ }
+
+ case types.TINTER:
+ for _, f := range t.FieldSlice() {
+ if types.IsExported(f.Sym.Name) {
+ p.markType(f.Type)
+ }
+ }
+ }
+}
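
markType's rule (follow element, key, field, and result types, but not parameter types) can be mirrored over the standard library's go/types API. The sketch below is only an analogy of that traversal, not the compiler's code: it keys the seen set by the printed type and skips the exported/embedded-field filtering that the real markType performs.

package main

import (
	"fmt"
	"go/types"
)

// mark records t and then, mirroring markType, recurses into the types a
// value of t can produce: element and key types, struct fields, receivable
// channel elements, and function results (but not parameters).
func mark(t types.Type, seen map[string]bool) {
	if seen[t.String()] {
		return
	}
	seen[t.String()] = true
	switch u := t.Underlying().(type) {
	case *types.Pointer:
		mark(u.Elem(), seen)
	case *types.Slice:
		mark(u.Elem(), seen)
	case *types.Array:
		mark(u.Elem(), seen)
	case *types.Chan:
		if u.Dir() != types.SendOnly {
			mark(u.Elem(), seen)
		}
	case *types.Map:
		mark(u.Key(), seen)
		mark(u.Elem(), seen)
	case *types.Struct:
		for i := 0; i < u.NumFields(); i++ {
			mark(u.Field(i).Type(), seen)
		}
	case *types.Signature:
		res := u.Results()
		for i := 0; i < res.Len(); i++ {
			mark(res.At(i).Type(), seen)
		}
	}
}

func main() {
	t := types.NewMap(types.Typ[types.String], types.NewPointer(types.Typ[types.Int64]))
	seen := map[string]bool{}
	mark(t, seen)
	fmt.Println(seen) // map[*int64:true int64:true map[string]*int64:true string:true]
}
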
diff --git a/src/cmd/compile/internal/gc/gen.go b/src/cmd/compile/internal/gc/gen.go
deleted file mode 100644
index 1084ff883f..0000000000
--- a/src/cmd/compile/internal/gc/gen.go
+++ /dev/null
@@ -1,76 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package gc
-
-import (
- "cmd/compile/internal/base"
- "cmd/compile/internal/ir"
- "cmd/compile/internal/types"
- "cmd/internal/obj"
- "cmd/internal/src"
- "strconv"
-)
-
-// sysfunc looks up Go function name in package runtime. This function
-// must follow the internal calling convention.
-func sysfunc(name string) *obj.LSym {
- s := ir.Pkgs.Runtime.Lookup(name)
- s.SetFunc(true)
- return s.Linksym()
-}
-
-// sysvar looks up a variable (or assembly function) name in package
-// runtime. If this is a function, it may have a special calling
-// convention.
-func sysvar(name string) *obj.LSym {
- return ir.Pkgs.Runtime.Lookup(name).Linksym()
-}
-
-// autotmpname returns the name for an autotmp variable numbered n.
-func autotmpname(n int) string {
- // Give each tmp a different name so that they can be registerized.
- // Add a preceding . to avoid clashing with legal names.
- const prefix = ".autotmp_"
- // Start with a buffer big enough to hold a large n.
- b := []byte(prefix + " ")[:len(prefix)]
- b = strconv.AppendInt(b, int64(n), 10)
- return types.InternString(b)
-}
-
-// make a new Node off the books
-func tempAt(pos src.XPos, curfn *ir.Func, t *types.Type) *ir.Name {
- if curfn == nil {
- base.Fatalf("no curfn for tempAt")
- }
- if curfn.Op() == ir.OCLOSURE {
- ir.Dump("tempAt", curfn)
- base.Fatalf("adding tempAt to wrong closure function")
- }
- if t == nil {
- base.Fatalf("tempAt called with nil type")
- }
-
- s := &types.Sym{
- Name: autotmpname(len(curfn.Dcl)),
- Pkg: types.LocalPkg,
- }
- n := ir.NewNameAt(pos, s)
- s.Def = n
- n.SetType(t)
- n.Class_ = ir.PAUTO
- n.SetEsc(ir.EscNever)
- n.Curfn = curfn
- n.SetUsed(true)
- n.SetAutoTemp(true)
- curfn.Dcl = append(curfn.Dcl, n)
-
- types.CalcSize(t)
-
- return n
-}
-
-func temp(t *types.Type) *ir.Name {
- return tempAt(base.Pos, ir.CurFunc, t)
-}
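
autotmpname, deleted here along with temp and tempAt, produces names of the form ".autotmp_N", where the leading dot keeps the temporaries out of the space of legal Go identifiers. Stripped of the compiler's string interning, the formatting is simply the following (autotmpName is a made-up name mirroring the deleted helper):

package main

import (
	"fmt"
	"strconv"
)

// autotmpName mirrors the deleted autotmpname: a dotted prefix plus the
// temp's index within the function's declaration list.
func autotmpName(n int) string {
	return ".autotmp_" + strconv.Itoa(n)
}

func main() {
	for i := 0; i < 3; i++ {
		fmt.Println(autotmpName(i)) // .autotmp_0, .autotmp_1, .autotmp_2
	}
}
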
diff --git a/src/cmd/compile/internal/gc/go.go b/src/cmd/compile/internal/gc/go.go
index a2587b3361..7648e910d5 100644
--- a/src/cmd/compile/internal/gc/go.go
+++ b/src/cmd/compile/internal/gc/go.go
@@ -5,7 +5,6 @@
package gc
import (
- "cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
@@ -14,37 +13,13 @@ import (
var pragcgobuf [][]string
-var decldepth int32
-
-var inimport bool // set during import
-
var zerosize int64
var (
- okforeq [types.NTYPE]bool
- okforadd [types.NTYPE]bool
- okforand [types.NTYPE]bool
- okfornone [types.NTYPE]bool
- okforbool [types.NTYPE]bool
- okforcap [types.NTYPE]bool
- okforlen [types.NTYPE]bool
- okforarith [types.NTYPE]bool
-)
-
-var (
- okfor [ir.OEND][]bool
- iscmp [ir.OEND]bool
-)
-
-var (
funcsymsmu sync.Mutex // protects funcsyms and associated package lookups (see func funcsym)
funcsyms []*types.Sym
)
-var dclcontext ir.Class // PEXTERN/PAUTO
-
-var typecheckok bool
-
// interface to back end
type Arch struct {
diff --git a/src/cmd/compile/internal/gc/gsubr.go b/src/cmd/compile/internal/gc/gsubr.go
index 6ea9b354ab..f24687ec0f 100644
--- a/src/cmd/compile/internal/gc/gsubr.go
+++ b/src/cmd/compile/internal/gc/gsubr.go
@@ -34,6 +34,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/objabi"
@@ -196,11 +197,11 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
// Q: is this needed?
savepos := base.Pos
- savedclcontext := dclcontext
+ savedclcontext := typecheck.DeclContext
savedcurfn := ir.CurFunc
base.Pos = base.AutogeneratedPos
- dclcontext = ir.PEXTERN
+ typecheck.DeclContext = ir.PEXTERN
// At the moment we don't support wrapping a method, we'd need machinery
// below to handle the receiver. Panic if we see this scenario.
@@ -213,11 +214,11 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
var noReceiver *ir.Field
tfn := ir.NewFuncType(base.Pos,
noReceiver,
- structargs(ft.Params(), true),
- structargs(ft.Results(), false))
+ typecheck.NewFuncParams(ft.Params(), true),
+ typecheck.NewFuncParams(ft.Results(), false))
// Reuse f's types.Sym to create a new ODCLFUNC/function.
- fn := dclfunc(f.Nname.Sym(), tfn)
+ fn := typecheck.DeclFunc(f.Nname.Sym(), tfn)
fn.SetDupok(true)
fn.SetWrapper(true) // ignore frame for panic+recover matching
@@ -281,22 +282,22 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
}
fn.Body.Append(tail)
- funcbody()
+ typecheck.FinishFuncBody()
if base.Debug.DclStack != 0 {
types.CheckDclstack()
}
- typecheckFunc(fn)
+ typecheck.Func(fn)
ir.CurFunc = fn
- typecheckslice(fn.Body, ctxStmt)
+ typecheck.Stmts(fn.Body)
escapeFuncs([]*ir.Func{fn}, false)
- Target.Decls = append(Target.Decls, fn)
+ typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
// Restore previous context.
base.Pos = savepos
- dclcontext = savedclcontext
+ typecheck.DeclContext = savedclcontext
ir.CurFunc = savedcurfn
}
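
makeABIWrapper builds a new function with the same parameter and result lists as the wrapped one and a body that just forwards the call. As a loose runtime analogy of that "same signature, forward everything" shape (not how the compiler does it, which constructs IR through the typecheck helpers shown above), reflect can build such a wrapper dynamically; the wrap helper below is hypothetical.

package main

import (
	"fmt"
	"reflect"
)

// wrap returns a new function with the same signature as fn that simply
// forwards its arguments to fn (non-variadic functions only in this sketch).
func wrap(fn interface{}) interface{} {
	v := reflect.ValueOf(fn)
	w := reflect.MakeFunc(v.Type(), func(args []reflect.Value) []reflect.Value {
		return v.Call(args)
	})
	return w.Interface()
}

func main() {
	add := func(a, b int) int { return a + b }
	wrapped := wrap(add).(func(int, int) int)
	fmt.Println(wrapped(2, 3)) // 5
}
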
diff --git a/src/cmd/compile/internal/gc/iexport.go b/src/cmd/compile/internal/gc/iexport.go
deleted file mode 100644
index fd64b69077..0000000000
--- a/src/cmd/compile/internal/gc/iexport.go
+++ /dev/null
@@ -1,1613 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Indexed package export.
-//
-// The indexed export data format is an evolution of the previous
-// binary export data format. Its chief contribution is introducing an
-// index table, which allows efficient random access of individual
-// declarations and inline function bodies. In turn, this allows
-// avoiding unnecessary work for compilation units that import large
-// packages.
-//
-//
-// The top-level data format is structured as:
-//
-// Header struct {
-// Tag byte // 'i'
-// Version uvarint
-// StringSize uvarint
-// DataSize uvarint
-// }
-//
-// Strings [StringSize]byte
-// Data [DataSize]byte
-//
-// MainIndex []struct{
-// PkgPath stringOff
-// PkgName stringOff
-// PkgHeight uvarint
-//
-// Decls []struct{
-// Name stringOff
-// Offset declOff
-// }
-// }
-//
-// Fingerprint [8]byte
-//
-// uvarint means a uint64 written out using uvarint encoding.
-//
-// []T means a uvarint followed by that many T objects. In other
-// words:
-//
-// Len uvarint
-// Elems [Len]T
-//
-// stringOff means a uvarint that indicates an offset within the
-// Strings section. At that offset is another uvarint, followed by
-// that many bytes, which form the string value.
-//
-// declOff means a uvarint that indicates an offset within the Data
-// section where the associated declaration can be found.
-//
-//
-// There are five kinds of declarations, distinguished by their first
-// byte:
-//
-// type Var struct {
-// Tag byte // 'V'
-// Pos Pos
-// Type typeOff
-// }
-//
-// type Func struct {
-// Tag byte // 'F'
-// Pos Pos
-// Signature Signature
-// }
-//
-// type Const struct {
-// Tag byte // 'C'
-// Pos Pos
-// Value Value
-// }
-//
-// type Type struct {
-// Tag byte // 'T'
-// Pos Pos
-// Underlying typeOff
-//
-// Methods []struct{ // omitted if Underlying is an interface type
-// Pos Pos
-// Name stringOff
-// Recv Param
-// Signature Signature
-// }
-// }
-//
-// type Alias struct {
-// Tag byte // 'A'
-// Pos Pos
-// Type typeOff
-// }
-//
-//
-// typeOff means a uvarint that either indicates a predeclared type,
-// or an offset into the Data section. If the uvarint is less than
-// predeclReserved, then it indicates the index into the predeclared
-// types list (see predeclared in bexport.go for order). Otherwise,
-// subtracting predeclReserved yields the offset of a type descriptor.
-//
-// Value means a type and type-specific value. See
-// (*exportWriter).value for details.
-//
-//
-// There are nine kinds of type descriptors, distinguished by an itag:
-//
-// type DefinedType struct {
-// Tag itag // definedType
-// Name stringOff
-// PkgPath stringOff
-// }
-//
-// type PointerType struct {
-// Tag itag // pointerType
-// Elem typeOff
-// }
-//
-// type SliceType struct {
-// Tag itag // sliceType
-// Elem typeOff
-// }
-//
-// type ArrayType struct {
-// Tag itag // arrayType
-// Len uint64
-// Elem typeOff
-// }
-//
-// type ChanType struct {
-// Tag itag // chanType
-// Dir uint64 // 1 RecvOnly; 2 SendOnly; 3 SendRecv
-// Elem typeOff
-// }
-//
-// type MapType struct {
-// Tag itag // mapType
-// Key typeOff
-// Elem typeOff
-// }
-//
-// type FuncType struct {
-// Tag itag // signatureType
-// PkgPath stringOff
-// Signature Signature
-// }
-//
-// type StructType struct {
-// Tag itag // structType
-// PkgPath stringOff
-// Fields []struct {
-// Pos Pos
-// Name stringOff
-// Type typeOff
-// Embedded bool
-// Note stringOff
-// }
-// }
-//
-// type InterfaceType struct {
-// Tag itag // interfaceType
-// PkgPath stringOff
-// Embeddeds []struct {
-// Pos Pos
-// Type typeOff
-// }
-// Methods []struct {
-// Pos Pos
-// Name stringOff
-// Signature Signature
-// }
-// }
-//
-//
-// type Signature struct {
-// Params []Param
-// Results []Param
-// Variadic bool // omitted if Results is empty
-// }
-//
-// type Param struct {
-// Pos Pos
-// Name stringOff
-// Type typOff
-// }
-//
-//
-// Pos encodes a file:line:column triple, incorporating a simple delta
-// encoding scheme within a data object. See exportWriter.pos for
-// details.
-//
-//
-// Compiler-specific details.
-//
-// cmd/compile writes out a second index for inline bodies and also
-// appends additional compiler-specific details after declarations.
-// Third-party tools are not expected to depend on these details and
-// they're expected to change much more rapidly, so they're omitted
-// here. See exportWriter's varExt/funcExt/etc methods for details.
-
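
To make the framing described above concrete: the header is a single tag byte 'i' followed by three uvarints. The throwaway sketch below writes a fake header and reads it back with encoding/binary; it is only an illustration of that framing (the version and size values are invented), not a real export-data reader.

package main

import (
	"bufio"
	"bytes"
	"encoding/binary"
	"fmt"
)

func main() {
	// Assemble a fake header: Tag 'i', Version 1, StringSize 10, DataSize 20.
	var hdr bytes.Buffer
	hdr.WriteByte('i')
	var tmp [binary.MaxVarintLen64]byte
	for _, v := range []uint64{1, 10, 20} {
		n := binary.PutUvarint(tmp[:], v)
		hdr.Write(tmp[:n])
	}

	// Read it back the way an importer would.
	r := bufio.NewReader(&hdr)
	tag, _ := r.ReadByte()
	version, _ := binary.ReadUvarint(r)
	strSize, _ := binary.ReadUvarint(r)
	dataSize, _ := binary.ReadUvarint(r)
	fmt.Printf("tag=%c version=%d strings=%d data=%d\n", tag, version, strSize, dataSize)
}
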
-package gc
-
-import (
- "bufio"
- "bytes"
- "cmd/compile/internal/base"
- "cmd/compile/internal/ir"
- "cmd/compile/internal/types"
- "cmd/internal/goobj"
- "cmd/internal/src"
- "crypto/md5"
- "encoding/binary"
- "fmt"
- "go/constant"
- "io"
- "math/big"
- "sort"
- "strings"
-)
-
-// Current indexed export format version. Increase with each format change.
-// 1: added column details to Pos
-// 0: Go1.11 encoding
-const iexportVersion = 1
-
-// predeclReserved is the number of type offsets reserved for types
-// implicitly declared in the universe block.
-const predeclReserved = 32
-
-// An itag distinguishes the kind of type that was written into the
-// indexed export format.
-type itag uint64
-
-const (
- // Types
- definedType itag = iota
- pointerType
- sliceType
- arrayType
- chanType
- mapType
- signatureType
- structType
- interfaceType
-)
-
-func iexport(out *bufio.Writer) {
- p := iexporter{
- allPkgs: map[*types.Pkg]bool{},
- stringIndex: map[string]uint64{},
- declIndex: map[*types.Sym]uint64{},
- inlineIndex: map[*types.Sym]uint64{},
- typIndex: map[*types.Type]uint64{},
- }
-
- for i, pt := range predeclared() {
- p.typIndex[pt] = uint64(i)
- }
- if len(p.typIndex) > predeclReserved {
- base.Fatalf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved)
- }
-
- // Initialize work queue with exported declarations.
- for _, n := range Target.Exports {
- p.pushDecl(n)
- }
-
- // Loop until no more work. We use a queue because while
- // writing out inline bodies, we may discover additional
- // declarations that are needed.
- for !p.declTodo.Empty() {
- p.doDecl(p.declTodo.PopLeft())
- }
-
- // Append indices to data0 section.
- dataLen := uint64(p.data0.Len())
- w := p.newWriter()
- w.writeIndex(p.declIndex, true)
- w.writeIndex(p.inlineIndex, false)
- w.flush()
-
- if *base.Flag.LowerV {
- fmt.Printf("export: hdr strings %v, data %v, index %v\n", p.strings.Len(), dataLen, p.data0.Len())
- }
-
- // Assemble header.
- var hdr intWriter
- hdr.WriteByte('i')
- hdr.uint64(iexportVersion)
- hdr.uint64(uint64(p.strings.Len()))
- hdr.uint64(dataLen)
-
- // Flush output.
- h := md5.New()
- wr := io.MultiWriter(out, h)
- io.Copy(wr, &hdr)
- io.Copy(wr, &p.strings)
- io.Copy(wr, &p.data0)
-
- // Add fingerprint (used by linker object file).
- // Attach this to the end, so tools (e.g. gcimporter) don't care.
- copy(base.Ctxt.Fingerprint[:], h.Sum(nil)[:])
- out.Write(base.Ctxt.Fingerprint[:])
-}
-
-// writeIndex writes out a symbol index. mainIndex indicates whether
-// we're writing out the main index, which is also read by
-// non-compiler tools and includes a complete package description
-// (i.e., name and height).
-func (w *exportWriter) writeIndex(index map[*types.Sym]uint64, mainIndex bool) {
- // Build a map from packages to symbols from that package.
- pkgSyms := map[*types.Pkg][]*types.Sym{}
-
- // For the main index, make sure to include every package that
- // we reference, even if we're not exporting (or reexporting)
- // any symbols from it.
- if mainIndex {
- pkgSyms[types.LocalPkg] = nil
- for pkg := range w.p.allPkgs {
- pkgSyms[pkg] = nil
- }
- }
-
- // Group symbols by package.
- for sym := range index {
- pkgSyms[sym.Pkg] = append(pkgSyms[sym.Pkg], sym)
- }
-
- // Sort packages by path.
- var pkgs []*types.Pkg
- for pkg := range pkgSyms {
- pkgs = append(pkgs, pkg)
- }
- sort.Slice(pkgs, func(i, j int) bool {
- return pkgs[i].Path < pkgs[j].Path
- })
-
- w.uint64(uint64(len(pkgs)))
- for _, pkg := range pkgs {
- w.string(pkg.Path)
- if mainIndex {
- w.string(pkg.Name)
- w.uint64(uint64(pkg.Height))
- }
-
- // Sort symbols within a package by name.
- syms := pkgSyms[pkg]
- sort.Slice(syms, func(i, j int) bool {
- return syms[i].Name < syms[j].Name
- })
-
- w.uint64(uint64(len(syms)))
- for _, sym := range syms {
- w.string(sym.Name)
- w.uint64(index[sym])
- }
- }
-}
-
-type iexporter struct {
- // allPkgs tracks all packages that have been referenced by
- // the export data, so we can ensure to include them in the
- // main index.
- allPkgs map[*types.Pkg]bool
-
- declTodo ir.NameQueue
-
- strings intWriter
- stringIndex map[string]uint64
-
- data0 intWriter
- declIndex map[*types.Sym]uint64
- inlineIndex map[*types.Sym]uint64
- typIndex map[*types.Type]uint64
-}
-
-// stringOff returns the offset of s within the string section.
-// If not already present, it's added to the end.
-func (p *iexporter) stringOff(s string) uint64 {
- off, ok := p.stringIndex[s]
- if !ok {
- off = uint64(p.strings.Len())
- p.stringIndex[s] = off
-
- if *base.Flag.LowerV {
- fmt.Printf("export: str %v %.40q\n", off, s)
- }
-
- p.strings.uint64(uint64(len(s)))
- p.strings.WriteString(s)
- }
- return off
-}
-
-// pushDecl adds n to the declaration work queue, if not already present.
-func (p *iexporter) pushDecl(n *ir.Name) {
- if n.Sym() == nil || n.Sym().Def != n && n.Op() != ir.OTYPE {
- base.Fatalf("weird Sym: %v, %v", n, n.Sym())
- }
-
- // Don't export predeclared declarations.
- if n.Sym().Pkg == types.BuiltinPkg || n.Sym().Pkg == ir.Pkgs.Unsafe {
- return
- }
-
- if _, ok := p.declIndex[n.Sym()]; ok {
- return
- }
-
- p.declIndex[n.Sym()] = ^uint64(0) // mark n present in work queue
- p.declTodo.PushRight(n)
-}
-
-// exportWriter handles writing out individual data section chunks.
-type exportWriter struct {
- p *iexporter
-
- data intWriter
- currPkg *types.Pkg
- prevFile string
- prevLine int64
- prevColumn int64
-}
-
-func (p *iexporter) doDecl(n *ir.Name) {
- w := p.newWriter()
- w.setPkg(n.Sym().Pkg, false)
-
- switch n.Op() {
- case ir.ONAME:
- switch n.Class_ {
- case ir.PEXTERN:
- // Variable.
- w.tag('V')
- w.pos(n.Pos())
- w.typ(n.Type())
- w.varExt(n)
-
- case ir.PFUNC:
- if ir.IsMethod(n) {
- base.Fatalf("unexpected method: %v", n)
- }
-
- // Function.
- w.tag('F')
- w.pos(n.Pos())
- w.signature(n.Type())
- w.funcExt(n)
-
- default:
- base.Fatalf("unexpected class: %v, %v", n, n.Class_)
- }
-
- case ir.OLITERAL:
- // Constant.
- // TODO(mdempsky): Do we still need this typecheck? If so, why?
- n = typecheck(n, ctxExpr).(*ir.Name)
- w.tag('C')
- w.pos(n.Pos())
- w.value(n.Type(), n.Val())
-
- case ir.OTYPE:
- if types.IsDotAlias(n.Sym()) {
- // Alias.
- w.tag('A')
- w.pos(n.Pos())
- w.typ(n.Type())
- break
- }
-
- // Defined type.
- w.tag('T')
- w.pos(n.Pos())
-
- underlying := n.Type().Underlying()
- if underlying == types.ErrorType.Underlying() {
- // For "type T error", use error as the
- // underlying type instead of error's own
- // underlying anonymous interface. This
- // ensures consistency with how importers may
- // declare error (e.g., go/types uses nil Pkg
- // for predeclared objects).
- underlying = types.ErrorType
- }
- w.typ(underlying)
-
- t := n.Type()
- if t.IsInterface() {
- w.typeExt(t)
- break
- }
-
- ms := t.Methods()
- w.uint64(uint64(ms.Len()))
- for _, m := range ms.Slice() {
- w.pos(m.Pos)
- w.selector(m.Sym)
- w.param(m.Type.Recv())
- w.signature(m.Type)
- }
-
- w.typeExt(t)
- for _, m := range ms.Slice() {
- w.methExt(m)
- }
-
- default:
- base.Fatalf("unexpected node: %v", n)
- }
-
- w.finish("dcl", p.declIndex, n.Sym())
-}
-
-func (w *exportWriter) tag(tag byte) {
- w.data.WriteByte(tag)
-}
-
-func (w *exportWriter) finish(what string, index map[*types.Sym]uint64, sym *types.Sym) {
- off := w.flush()
- if *base.Flag.LowerV {
- fmt.Printf("export: %v %v %v\n", what, off, sym)
- }
- index[sym] = off
-}
-
-func (p *iexporter) doInline(f *ir.Name) {
- w := p.newWriter()
- w.setPkg(fnpkg(f), false)
-
- w.stmtList(ir.Nodes(f.Func.Inl.Body))
-
- w.finish("inl", p.inlineIndex, f.Sym())
-}
-
-func (w *exportWriter) pos(pos src.XPos) {
- p := base.Ctxt.PosTable.Pos(pos)
- file := p.Base().AbsFilename()
- line := int64(p.RelLine())
- column := int64(p.RelCol())
-
- // Encode position relative to the last position: column
- // delta, then line delta, then file name. We reserve the
- // bottom bit of the column and line deltas to encode whether
- // the remaining fields are present.
- //
- // Note: Because data objects may be read out of order (or not
- // at all), we can only apply delta encoding within a single
- // object. This is handled implicitly by tracking prevFile,
- // prevLine, and prevColumn as fields of exportWriter.
-
- deltaColumn := (column - w.prevColumn) << 1
- deltaLine := (line - w.prevLine) << 1
-
- if file != w.prevFile {
- deltaLine |= 1
- }
- if deltaLine != 0 {
- deltaColumn |= 1
- }
-
- w.int64(deltaColumn)
- if deltaColumn&1 != 0 {
- w.int64(deltaLine)
- if deltaLine&1 != 0 {
- w.string(file)
- }
- }
-
- w.prevFile = file
- w.prevLine = line
- w.prevColumn = column
-}
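
The delta scheme above can be seen in isolation: the low bit of the column delta says whether a line delta follows, and the low bit of the line delta says whether a file name follows. The standalone mirror below reproduces just that encoding step; the pos struct, encodePos, and the sample positions are made up for the demo.

package main

import "fmt"

type pos struct {
	file         string
	line, column int64
}

// encodePos mirrors the writer above: emit the column delta, and use the low
// bits of the column and line deltas to flag whether the line delta and file
// name follow.
func encodePos(prev *pos, p pos) (fields []interface{}) {
	deltaColumn := (p.column - prev.column) << 1
	deltaLine := (p.line - prev.line) << 1
	if p.file != prev.file {
		deltaLine |= 1
	}
	if deltaLine != 0 {
		deltaColumn |= 1
	}
	fields = append(fields, deltaColumn)
	if deltaColumn&1 != 0 {
		fields = append(fields, deltaLine)
		if deltaLine&1 != 0 {
			fields = append(fields, p.file)
		}
	}
	*prev = p
	return fields
}

func main() {
	var prev pos
	fmt.Println(encodePos(&prev, pos{"a.go", 10, 6})) // [13 21 a.go]
	fmt.Println(encodePos(&prev, pos{"a.go", 10, 9})) // [6]
}
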
-
-func (w *exportWriter) pkg(pkg *types.Pkg) {
- // Ensure any referenced packages are declared in the main index.
- w.p.allPkgs[pkg] = true
-
- w.string(pkg.Path)
-}
-
-func (w *exportWriter) qualifiedIdent(n ir.Node) {
- // Ensure any referenced declarations are written out too.
- w.p.pushDecl(n.Name())
-
- s := n.Sym()
- w.string(s.Name)
- w.pkg(s.Pkg)
-}
-
-func (w *exportWriter) selector(s *types.Sym) {
- if w.currPkg == nil {
- base.Fatalf("missing currPkg")
- }
-
- // Method selectors are rewritten into method symbols (of the
- // form T.M) during typechecking, but we want to write out
- // just the bare method name.
- name := s.Name
- if i := strings.LastIndex(name, "."); i >= 0 {
- name = name[i+1:]
- } else {
- pkg := w.currPkg
- if types.IsExported(name) {
- pkg = types.LocalPkg
- }
- if s.Pkg != pkg {
- base.Fatalf("package mismatch in selector: %v in package %q, but want %q", s, s.Pkg.Path, pkg.Path)
- }
- }
-
- w.string(name)
-}
-
-func (w *exportWriter) typ(t *types.Type) {
- w.data.uint64(w.p.typOff(t))
-}
-
-func (p *iexporter) newWriter() *exportWriter {
- return &exportWriter{p: p}
-}
-
-func (w *exportWriter) flush() uint64 {
- off := uint64(w.p.data0.Len())
- io.Copy(&w.p.data0, &w.data)
- return off
-}
-
-func (p *iexporter) typOff(t *types.Type) uint64 {
- off, ok := p.typIndex[t]
- if !ok {
- w := p.newWriter()
- w.doTyp(t)
- rawOff := w.flush()
- if *base.Flag.LowerV {
- fmt.Printf("export: typ %v %v\n", rawOff, t)
- }
- off = predeclReserved + rawOff
- p.typIndex[t] = off
- }
- return off
-}
-
-func (w *exportWriter) startType(k itag) {
- w.data.uint64(uint64(k))
-}
-
-func (w *exportWriter) doTyp(t *types.Type) {
- if t.Sym() != nil {
- if t.Sym().Pkg == types.BuiltinPkg || t.Sym().Pkg == ir.Pkgs.Unsafe {
- base.Fatalf("builtin type missing from typIndex: %v", t)
- }
-
- w.startType(definedType)
- w.qualifiedIdent(t.Obj().(*ir.Name))
- return
- }
-
- switch t.Kind() {
- case types.TPTR:
- w.startType(pointerType)
- w.typ(t.Elem())
-
- case types.TSLICE:
- w.startType(sliceType)
- w.typ(t.Elem())
-
- case types.TARRAY:
- w.startType(arrayType)
- w.uint64(uint64(t.NumElem()))
- w.typ(t.Elem())
-
- case types.TCHAN:
- w.startType(chanType)
- w.uint64(uint64(t.ChanDir()))
- w.typ(t.Elem())
-
- case types.TMAP:
- w.startType(mapType)
- w.typ(t.Key())
- w.typ(t.Elem())
-
- case types.TFUNC:
- w.startType(signatureType)
- w.setPkg(t.Pkg(), true)
- w.signature(t)
-
- case types.TSTRUCT:
- w.startType(structType)
- w.setPkg(t.Pkg(), true)
-
- w.uint64(uint64(t.NumFields()))
- for _, f := range t.FieldSlice() {
- w.pos(f.Pos)
- w.selector(f.Sym)
- w.typ(f.Type)
- w.bool(f.Embedded != 0)
- w.string(f.Note)
- }
-
- case types.TINTER:
- var embeddeds, methods []*types.Field
- for _, m := range t.Methods().Slice() {
- if m.Sym != nil {
- methods = append(methods, m)
- } else {
- embeddeds = append(embeddeds, m)
- }
- }
-
- w.startType(interfaceType)
- w.setPkg(t.Pkg(), true)
-
- w.uint64(uint64(len(embeddeds)))
- for _, f := range embeddeds {
- w.pos(f.Pos)
- w.typ(f.Type)
- }
-
- w.uint64(uint64(len(methods)))
- for _, f := range methods {
- w.pos(f.Pos)
- w.selector(f.Sym)
- w.signature(f.Type)
- }
-
- default:
- base.Fatalf("unexpected type: %v", t)
- }
-}
-
-func (w *exportWriter) setPkg(pkg *types.Pkg, write bool) {
- if pkg == types.NoPkg {
- base.Fatalf("missing pkg")
- }
-
- if write {
- w.pkg(pkg)
- }
-
- w.currPkg = pkg
-}
-
-func (w *exportWriter) signature(t *types.Type) {
- w.paramList(t.Params().FieldSlice())
- w.paramList(t.Results().FieldSlice())
- if n := t.Params().NumFields(); n > 0 {
- w.bool(t.Params().Field(n - 1).IsDDD())
- }
-}
-
-func (w *exportWriter) paramList(fs []*types.Field) {
- w.uint64(uint64(len(fs)))
- for _, f := range fs {
- w.param(f)
- }
-}
-
-func (w *exportWriter) param(f *types.Field) {
- w.pos(f.Pos)
- w.localIdent(types.OrigSym(f.Sym), 0)
- w.typ(f.Type)
-}
-
-func constTypeOf(typ *types.Type) constant.Kind {
- switch typ {
- case types.UntypedInt, types.UntypedRune:
- return constant.Int
- case types.UntypedFloat:
- return constant.Float
- case types.UntypedComplex:
- return constant.Complex
- }
-
- switch typ.Kind() {
- case types.TBOOL:
- return constant.Bool
- case types.TSTRING:
- return constant.String
- case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64,
- types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR:
- return constant.Int
- case types.TFLOAT32, types.TFLOAT64:
- return constant.Float
- case types.TCOMPLEX64, types.TCOMPLEX128:
- return constant.Complex
- }
-
- base.Fatalf("unexpected constant type: %v", typ)
- return 0
-}
-
-func (w *exportWriter) value(typ *types.Type, v constant.Value) {
- ir.AssertValidTypeForConst(typ, v)
- w.typ(typ)
-
- // Each type has only one admissible constant representation,
- // so we could type switch directly on v.U here. However,
- // switching on the type increases symmetry with import logic
- // and provides a useful consistency check.
-
- switch constTypeOf(typ) {
- case constant.Bool:
- w.bool(constant.BoolVal(v))
- case constant.String:
- w.string(constant.StringVal(v))
- case constant.Int:
- w.mpint(v, typ)
- case constant.Float:
- w.mpfloat(v, typ)
- case constant.Complex:
- w.mpfloat(constant.Real(v), typ)
- w.mpfloat(constant.Imag(v), typ)
- }
-}
-
-func intSize(typ *types.Type) (signed bool, maxBytes uint) {
- if typ.IsUntyped() {
- return true, ir.ConstPrec / 8
- }
-
- switch typ.Kind() {
- case types.TFLOAT32, types.TCOMPLEX64:
- return true, 3
- case types.TFLOAT64, types.TCOMPLEX128:
- return true, 7
- }
-
- signed = typ.IsSigned()
- maxBytes = uint(typ.Size())
-
- // The go/types API doesn't expose sizes to importers, so they
- // don't know how big these types are.
- switch typ.Kind() {
- case types.TINT, types.TUINT, types.TUINTPTR:
- maxBytes = 8
- }
-
- return
-}
-
-// mpint exports a multi-precision integer.
-//
-// For unsigned types, small values are written out as a single
-// byte. Larger values are written out as a length-prefixed big-endian
-// byte string, where the length prefix is encoded as its complement.
-// For example, bytes 0, 1, and 2 directly represent the integer
-// values 0, 1, and 2; while bytes 255, 254, and 253 indicate that a 1-,
-// 2-, and 3-byte big-endian string follows, respectively.
-//
-// The encoding for signed types uses the same general approach as for
-// unsigned types, except small values use zig-zag encoding and the
-// bottom bit of the length prefix byte for large values is reserved as a
-// sign bit.
-//
-// The exact boundary between small and large encodings varies
-// according to the maximum number of bytes needed to encode a value
-// of type typ. As a special case, 8-bit types are always encoded as a
-// single byte.
-//
-// TODO(mdempsky): Is this level of complexity really worthwhile?
-func (w *exportWriter) mpint(x constant.Value, typ *types.Type) {
- signed, maxBytes := intSize(typ)
-
- negative := constant.Sign(x) < 0
- if !signed && negative {
- base.Fatalf("negative unsigned integer; type %v, value %v", typ, x)
- }
-
- b := constant.Bytes(x) // little endian
- for i, j := 0, len(b)-1; i < j; i, j = i+1, j-1 {
- b[i], b[j] = b[j], b[i]
- }
-
- if len(b) > 0 && b[0] == 0 {
- base.Fatalf("leading zeros")
- }
- if uint(len(b)) > maxBytes {
- base.Fatalf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x)
- }
-
- maxSmall := 256 - maxBytes
- if signed {
- maxSmall = 256 - 2*maxBytes
- }
- if maxBytes == 1 {
- maxSmall = 256
- }
-
- // Check if x can use small value encoding.
- if len(b) <= 1 {
- var ux uint
- if len(b) == 1 {
- ux = uint(b[0])
- }
- if signed {
- ux <<= 1
- if negative {
- ux--
- }
- }
- if ux < maxSmall {
- w.data.WriteByte(byte(ux))
- return
- }
- }
-
- n := 256 - uint(len(b))
- if signed {
- n = 256 - 2*uint(len(b))
- if negative {
- n |= 1
- }
- }
- if n < maxSmall || n >= 256 {
- base.Fatalf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n)
- }
-
- w.data.WriteByte(byte(n))
- w.data.Write(b)
-}
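
For signed types the small-value path is a zig-zag fold: 0, -1, 1, -2, 2, ... map to 0, 1, 2, 3, 4, ..., and with maxBytes == 8 anything below maxSmall = 256 - 2*8 = 240 fits in a single byte, i.e. values roughly in [-120, 119]. A standalone round trip of just that fold is sketched below; zigzag and unzigzag are made-up names that match the writer above and the importer's decoder in iimport.go.

package main

import "fmt"

// zigzag matches the writer: ux = 2*|v|, minus one for negative values.
// (Fine for the small demo values; very large magnitudes would overflow.)
func zigzag(v int64) uint64 {
	if v >= 0 {
		return 2 * uint64(v)
	}
	return 2*uint64(-v) - 1
}

// unzigzag matches the importer's decoder: halve, then complement if odd.
func unzigzag(u uint64) int64 {
	v := int64(u >> 1)
	if u&1 != 0 {
		v = ^v
	}
	return v
}

func main() {
	for _, v := range []int64{0, -1, 1, -2, 2, 119, -120} {
		u := zigzag(v)
		fmt.Printf("%4d -> %3d -> %4d (single byte: %v)\n", v, u, unzigzag(u), u < 240)
	}
}
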
-
-// mpfloat exports a multi-precision floating point number.
-//
-// The number's value is decomposed into mantissa × 2**exponent, where
-// mantissa is an integer. The value is written out as mantissa (as a
-// multi-precision integer) and then the exponent, except exponent is
-// omitted if mantissa is zero.
-func (w *exportWriter) mpfloat(v constant.Value, typ *types.Type) {
- f := ir.BigFloat(v)
- if f.IsInf() {
- base.Fatalf("infinite constant")
- }
-
- // Break into f = mant × 2**exp, with 0.5 <= mant < 1.
- var mant big.Float
- exp := int64(f.MantExp(&mant))
-
- // Scale so that mant is an integer.
- prec := mant.MinPrec()
- mant.SetMantExp(&mant, int(prec))
- exp -= int64(prec)
-
- manti, acc := mant.Int(nil)
- if acc != big.Exact {
- base.Fatalf("mantissa scaling failed for %f (%s)", f, acc)
- }
- w.mpint(makeInt(manti), typ)
- if manti.Sign() != 0 {
- w.int64(exp)
- }
-}
-
-func (w *exportWriter) bool(b bool) bool {
- var x uint64
- if b {
- x = 1
- }
- w.uint64(x)
- return b
-}
-
-func (w *exportWriter) int64(x int64) { w.data.int64(x) }
-func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) }
-func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) }
-
-// Compiler-specific extensions.
-
-func (w *exportWriter) varExt(n ir.Node) {
- w.linkname(n.Sym())
- w.symIdx(n.Sym())
-}
-
-func (w *exportWriter) funcExt(n *ir.Name) {
- w.linkname(n.Sym())
- w.symIdx(n.Sym())
-
- // Escape analysis.
- for _, fs := range &types.RecvsParams {
- for _, f := range fs(n.Type()).FieldSlice() {
- w.string(f.Note)
- }
- }
-
- // Inline body.
- if n.Func.Inl != nil {
- w.uint64(1 + uint64(n.Func.Inl.Cost))
- if n.Func.ExportInline() {
- w.p.doInline(n)
- }
-
- // Endlineno for inlined function.
- w.pos(n.Func.Endlineno)
- } else {
- w.uint64(0)
- }
-}
-
-func (w *exportWriter) methExt(m *types.Field) {
- w.bool(m.Nointerface())
- w.funcExt(m.Nname.(*ir.Name))
-}
-
-func (w *exportWriter) linkname(s *types.Sym) {
- w.string(s.Linkname)
-}
-
-func (w *exportWriter) symIdx(s *types.Sym) {
- lsym := s.Linksym()
- if lsym.PkgIdx > goobj.PkgIdxSelf || (lsym.PkgIdx == goobj.PkgIdxInvalid && !lsym.Indexed()) || s.Linkname != "" {
- // Don't export index for non-package symbols, linkname'd symbols,
- // and symbols without an index. They can only be referenced by
- // name.
- w.int64(-1)
- } else {
- // For a defined symbol, export its index.
- // For re-exporting an imported symbol, pass its index through.
- w.int64(int64(lsym.SymIdx))
- }
-}
-
-func (w *exportWriter) typeExt(t *types.Type) {
- // Export whether this type is marked notinheap.
- w.bool(t.NotInHeap())
- // For type T, export the index of type descriptor symbols of T and *T.
- if i, ok := typeSymIdx[t]; ok {
- w.int64(i[0])
- w.int64(i[1])
- return
- }
- w.symIdx(types.TypeSym(t))
- w.symIdx(types.TypeSym(t.PtrTo()))
-}
-
-// Inline bodies.
-
-func (w *exportWriter) stmtList(list ir.Nodes) {
- for _, n := range list {
- w.node(n)
- }
- w.op(ir.OEND)
-}
-
-func (w *exportWriter) node(n ir.Node) {
- if ir.OpPrec[n.Op()] < 0 {
- w.stmt(n)
- } else {
- w.expr(n)
- }
-}
-
-// Caution: stmt will emit more than one node for statement nodes n that have a non-empty
-// n.Ninit and where n cannot have a natural init section (such as in "if", "for", etc.).
-func (w *exportWriter) stmt(n ir.Node) {
- if len(n.Init()) > 0 && !ir.StmtWithInit(n.Op()) {
- // can't use stmtList here since we don't want the final OEND
- for _, n := range n.Init() {
- w.stmt(n)
- }
- }
-
- switch n.Op() {
- case ir.OBLOCK:
- // No OBLOCK in export data.
- // Inline content into this statement list,
- // like the init list above.
- // (At the moment neither the parser nor the typechecker
- // generate OBLOCK nodes except to denote an empty
- // function body, although that may change.)
- n := n.(*ir.BlockStmt)
- for _, n := range n.List {
- w.stmt(n)
- }
-
- case ir.ODCL:
- n := n.(*ir.Decl)
- w.op(ir.ODCL)
- w.pos(n.X.Pos())
- w.localName(n.X.(*ir.Name))
- w.typ(n.X.Type())
-
- case ir.OAS:
- // Don't export "v = <N>" initializing statements, hope they're always
- // preceded by the DCL, which will be re-parsed and typechecked to reproduce
- // the "v = <N>" again.
- n := n.(*ir.AssignStmt)
- if n.Y != nil {
- w.op(ir.OAS)
- w.pos(n.Pos())
- w.expr(n.X)
- w.expr(n.Y)
- }
-
- case ir.OASOP:
- n := n.(*ir.AssignOpStmt)
- w.op(ir.OASOP)
- w.pos(n.Pos())
- w.op(n.AsOp)
- w.expr(n.X)
- if w.bool(!n.IncDec) {
- w.expr(n.Y)
- }
-
- case ir.OAS2, ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
- n := n.(*ir.AssignListStmt)
- w.op(ir.OAS2)
- w.pos(n.Pos())
- w.exprList(n.Lhs)
- w.exprList(n.Rhs)
-
- case ir.ORETURN:
- n := n.(*ir.ReturnStmt)
- w.op(ir.ORETURN)
- w.pos(n.Pos())
- w.exprList(n.Results)
-
- // case ORETJMP:
- // unreachable - generated by compiler for trampoline routines
-
- case ir.OGO, ir.ODEFER:
- n := n.(*ir.GoDeferStmt)
- w.op(n.Op())
- w.pos(n.Pos())
- w.expr(n.Call)
-
- case ir.OIF:
- n := n.(*ir.IfStmt)
- w.op(ir.OIF)
- w.pos(n.Pos())
- w.stmtList(n.Init())
- w.expr(n.Cond)
- w.stmtList(n.Body)
- w.stmtList(n.Else)
-
- case ir.OFOR:
- n := n.(*ir.ForStmt)
- w.op(ir.OFOR)
- w.pos(n.Pos())
- w.stmtList(n.Init())
- w.exprsOrNil(n.Cond, n.Post)
- w.stmtList(n.Body)
-
- case ir.ORANGE:
- n := n.(*ir.RangeStmt)
- w.op(ir.ORANGE)
- w.pos(n.Pos())
- w.stmtList(n.Vars)
- w.expr(n.X)
- w.stmtList(n.Body)
-
- case ir.OSELECT:
- n := n.(*ir.SelectStmt)
- w.op(n.Op())
- w.pos(n.Pos())
- w.stmtList(n.Init())
- w.exprsOrNil(nil, nil) // TODO(rsc): Delete (and fix importer).
- w.caseList(n)
-
- case ir.OSWITCH:
- n := n.(*ir.SwitchStmt)
- w.op(n.Op())
- w.pos(n.Pos())
- w.stmtList(n.Init())
- w.exprsOrNil(n.Tag, nil)
- w.caseList(n)
-
- // case OCASE:
- // handled by caseList
-
- case ir.OFALL:
- n := n.(*ir.BranchStmt)
- w.op(ir.OFALL)
- w.pos(n.Pos())
-
- case ir.OBREAK, ir.OCONTINUE, ir.OGOTO, ir.OLABEL:
- w.op(n.Op())
- w.pos(n.Pos())
- label := ""
- if sym := n.Sym(); sym != nil {
- label = sym.Name
- }
- w.string(label)
-
- default:
- base.Fatalf("exporter: CANNOT EXPORT: %v\nPlease notify gri@\n", n.Op())
- }
-}
-
-func isNamedTypeSwitch(n ir.Node) bool {
- if n.Op() != ir.OSWITCH {
- return false
- }
- sw := n.(*ir.SwitchStmt)
- if sw.Tag == nil || sw.Tag.Op() != ir.OTYPESW {
- return false
- }
- guard := sw.Tag.(*ir.TypeSwitchGuard)
- return guard.Tag != nil
-}
-
-func (w *exportWriter) caseList(sw ir.Node) {
- namedTypeSwitch := isNamedTypeSwitch(sw)
-
- var cases []ir.Node
- if sw.Op() == ir.OSWITCH {
- cases = sw.(*ir.SwitchStmt).Cases
- } else {
- cases = sw.(*ir.SelectStmt).Cases
- }
- w.uint64(uint64(len(cases)))
- for _, cas := range cases {
- cas := cas.(*ir.CaseStmt)
- w.pos(cas.Pos())
- w.stmtList(cas.List)
- if namedTypeSwitch {
- w.localName(cas.Vars[0].(*ir.Name))
- }
- w.stmtList(cas.Body)
- }
-}
-
-func (w *exportWriter) exprList(list ir.Nodes) {
- for _, n := range list {
- w.expr(n)
- }
- w.op(ir.OEND)
-}
-
-func simplifyForExport(n ir.Node) ir.Node {
- switch n.Op() {
- case ir.OPAREN:
- n := n.(*ir.ParenExpr)
- return simplifyForExport(n.X)
- case ir.ODEREF:
- n := n.(*ir.StarExpr)
- if n.Implicit() {
- return simplifyForExport(n.X)
- }
- case ir.OADDR:
- n := n.(*ir.AddrExpr)
- if n.Implicit() {
- return simplifyForExport(n.X)
- }
- case ir.ODOT, ir.ODOTPTR:
- n := n.(*ir.SelectorExpr)
- if n.Implicit() {
- return simplifyForExport(n.X)
- }
- }
- return n
-}
-
-func (w *exportWriter) expr(n ir.Node) {
- n = simplifyForExport(n)
- switch n.Op() {
- // expressions
- // (somewhat closely following the structure of exprfmt in fmt.go)
- case ir.ONIL:
- n := n.(*ir.NilExpr)
- if !n.Type().HasNil() {
- base.Fatalf("unexpected type for nil: %v", n.Type())
- }
- w.op(ir.ONIL)
- w.pos(n.Pos())
- w.typ(n.Type())
-
- case ir.OLITERAL:
- w.op(ir.OLITERAL)
- w.pos(n.Pos())
- w.value(n.Type(), n.Val())
-
- case ir.OMETHEXPR:
- // Special case: explicit name of func (*T) method(...) is turned into pkg.(*T).method,
- // but for export, this should be rendered as (*pkg.T).meth.
- // These nodes have the special property that they are names with a left OTYPE and a right ONAME.
- n := n.(*ir.MethodExpr)
- w.op(ir.OXDOT)
- w.pos(n.Pos())
- w.op(ir.OTYPE)
- w.typ(n.T) // n.Left.Op == OTYPE
- w.selector(n.Method.Sym)
-
- case ir.ONAME:
- // Package scope name.
- n := n.(*ir.Name)
- if (n.Class_ == ir.PEXTERN || n.Class_ == ir.PFUNC) && !ir.IsBlank(n) {
- w.op(ir.ONONAME)
- w.qualifiedIdent(n)
- break
- }
-
- // Function scope name.
- w.op(ir.ONAME)
- w.localName(n)
-
- // case OPACK, ONONAME:
- // should have been resolved by typechecking - handled by default case
-
- case ir.OTYPE:
- w.op(ir.OTYPE)
- w.typ(n.Type())
-
- case ir.OTYPESW:
- n := n.(*ir.TypeSwitchGuard)
- w.op(ir.OTYPESW)
- w.pos(n.Pos())
- var s *types.Sym
- if n.Tag != nil {
- if n.Tag.Op() != ir.ONONAME {
- base.Fatalf("expected ONONAME, got %v", n.Tag)
- }
- s = n.Tag.Sym()
- }
- w.localIdent(s, 0) // declared pseudo-variable, if any
- w.exprsOrNil(n.X, nil)
-
- // case OTARRAY, OTMAP, OTCHAN, OTSTRUCT, OTINTER, OTFUNC:
- // should have been resolved by typechecking - handled by default case
-
- // case OCLOSURE:
- // unimplemented - handled by default case
-
- // case OCOMPLIT:
- // should have been resolved by typechecking - handled by default case
-
- case ir.OPTRLIT:
- n := n.(*ir.AddrExpr)
- w.op(ir.OADDR)
- w.pos(n.Pos())
- w.expr(n.X)
-
- case ir.OSTRUCTLIT:
- n := n.(*ir.CompLitExpr)
- w.op(ir.OSTRUCTLIT)
- w.pos(n.Pos())
- w.typ(n.Type())
- w.fieldList(n.List) // special handling of field names
-
- case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
- n := n.(*ir.CompLitExpr)
- w.op(ir.OCOMPLIT)
- w.pos(n.Pos())
- w.typ(n.Type())
- w.exprList(n.List)
-
- case ir.OKEY:
- n := n.(*ir.KeyExpr)
- w.op(ir.OKEY)
- w.pos(n.Pos())
- w.exprsOrNil(n.Key, n.Value)
-
- // case OSTRUCTKEY:
- // unreachable - handled in case OSTRUCTLIT by elemList
-
- case ir.OCALLPART:
- // An OCALLPART is an OXDOT before type checking.
- n := n.(*ir.CallPartExpr)
- w.op(ir.OXDOT)
- w.pos(n.Pos())
- w.expr(n.X)
- w.selector(n.Method.Sym)
-
- case ir.OXDOT, ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH:
- n := n.(*ir.SelectorExpr)
- w.op(ir.OXDOT)
- w.pos(n.Pos())
- w.expr(n.X)
- w.selector(n.Sel)
-
- case ir.ODOTTYPE, ir.ODOTTYPE2:
- n := n.(*ir.TypeAssertExpr)
- w.op(ir.ODOTTYPE)
- w.pos(n.Pos())
- w.expr(n.X)
- w.typ(n.Type())
-
- case ir.OINDEX, ir.OINDEXMAP:
- n := n.(*ir.IndexExpr)
- w.op(ir.OINDEX)
- w.pos(n.Pos())
- w.expr(n.X)
- w.expr(n.Index)
-
- case ir.OSLICE, ir.OSLICESTR, ir.OSLICEARR:
- n := n.(*ir.SliceExpr)
- w.op(ir.OSLICE)
- w.pos(n.Pos())
- w.expr(n.X)
- low, high, _ := n.SliceBounds()
- w.exprsOrNil(low, high)
-
- case ir.OSLICE3, ir.OSLICE3ARR:
- n := n.(*ir.SliceExpr)
- w.op(ir.OSLICE3)
- w.pos(n.Pos())
- w.expr(n.X)
- low, high, max := n.SliceBounds()
- w.exprsOrNil(low, high)
- w.expr(max)
-
- case ir.OCOPY, ir.OCOMPLEX:
- // treated like other builtin calls (see e.g., OREAL)
- n := n.(*ir.BinaryExpr)
- w.op(n.Op())
- w.pos(n.Pos())
- w.expr(n.X)
- w.expr(n.Y)
- w.op(ir.OEND)
-
- case ir.OCONV, ir.OCONVIFACE, ir.OCONVNOP, ir.OBYTES2STR, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2RUNES, ir.ORUNESTR:
- n := n.(*ir.ConvExpr)
- w.op(ir.OCONV)
- w.pos(n.Pos())
- w.expr(n.X)
- w.typ(n.Type())
-
- case ir.OREAL, ir.OIMAG, ir.OCAP, ir.OCLOSE, ir.OLEN, ir.ONEW, ir.OPANIC:
- n := n.(*ir.UnaryExpr)
- w.op(n.Op())
- w.pos(n.Pos())
- w.expr(n.X)
- w.op(ir.OEND)
-
- case ir.OAPPEND, ir.ODELETE, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
- n := n.(*ir.CallExpr)
- w.op(n.Op())
- w.pos(n.Pos())
- w.exprList(n.Args) // emits terminating OEND
- // only append() calls may contain '...' arguments
- if n.Op() == ir.OAPPEND {
- w.bool(n.IsDDD)
- } else if n.IsDDD {
- base.Fatalf("exporter: unexpected '...' with %v call", n.Op())
- }
-
- case ir.OCALL, ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER, ir.OGETG:
- n := n.(*ir.CallExpr)
- w.op(ir.OCALL)
- w.pos(n.Pos())
- w.stmtList(n.Init())
- w.expr(n.X)
- w.exprList(n.Args)
- w.bool(n.IsDDD)
-
- case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
- n := n.(*ir.MakeExpr)
- w.op(n.Op()) // must keep separate from OMAKE for importer
- w.pos(n.Pos())
- w.typ(n.Type())
- switch {
- default:
- // empty list
- w.op(ir.OEND)
- case n.Cap != nil:
- w.expr(n.Len)
- w.expr(n.Cap)
- w.op(ir.OEND)
- case n.Len != nil && (n.Op() == ir.OMAKESLICE || !n.Len.Type().IsUntyped()):
- w.expr(n.Len)
- w.op(ir.OEND)
- }
-
- // unary expressions
- case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.ORECV:
- n := n.(*ir.UnaryExpr)
- w.op(n.Op())
- w.pos(n.Pos())
- w.expr(n.X)
-
- case ir.OADDR:
- n := n.(*ir.AddrExpr)
- w.op(n.Op())
- w.pos(n.Pos())
- w.expr(n.X)
-
- case ir.ODEREF:
- n := n.(*ir.StarExpr)
- w.op(n.Op())
- w.pos(n.Pos())
- w.expr(n.X)
-
- case ir.OSEND:
- n := n.(*ir.SendStmt)
- w.op(n.Op())
- w.pos(n.Pos())
- w.expr(n.Chan)
- w.expr(n.Value)
-
- // binary expressions
- case ir.OADD, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT,
- ir.OLSH, ir.OMOD, ir.OMUL, ir.ONE, ir.OOR, ir.ORSH, ir.OSUB, ir.OXOR:
- n := n.(*ir.BinaryExpr)
- w.op(n.Op())
- w.pos(n.Pos())
- w.expr(n.X)
- w.expr(n.Y)
-
- case ir.OANDAND, ir.OOROR:
- n := n.(*ir.LogicalExpr)
- w.op(n.Op())
- w.pos(n.Pos())
- w.expr(n.X)
- w.expr(n.Y)
-
- case ir.OADDSTR:
- n := n.(*ir.AddStringExpr)
- w.op(ir.OADDSTR)
- w.pos(n.Pos())
- w.exprList(n.List)
-
- case ir.ODCLCONST:
- // if exporting, DCLCONST should just be removed as its usage
- // has already been replaced with literals
-
- default:
- base.Fatalf("cannot export %v (%d) node\n"+
- "\t==> please file an issue and assign to gri@", n.Op(), int(n.Op()))
- }
-}
-
-func (w *exportWriter) op(op ir.Op) {
- w.uint64(uint64(op))
-}
-
-func (w *exportWriter) exprsOrNil(a, b ir.Node) {
- ab := 0
- if a != nil {
- ab |= 1
- }
- if b != nil {
- ab |= 2
- }
- w.uint64(uint64(ab))
- if ab&1 != 0 {
- w.expr(a)
- }
- if ab&2 != 0 {
- w.node(b)
- }
-}
-
-func (w *exportWriter) fieldList(list ir.Nodes) {
- w.uint64(uint64(len(list)))
- for _, n := range list {
- n := n.(*ir.StructKeyExpr)
- w.selector(n.Field)
- w.expr(n.Value)
- }
-}
-
-func (w *exportWriter) localName(n *ir.Name) {
- // Escape analysis happens after inline bodies are saved, but
- // we're using the same ONAME nodes, so we might still see
- // PAUTOHEAP here.
- //
- // Check for Stackcopy to identify PAUTOHEAP that came from
- // PPARAM/PPARAMOUT, because we only want to include vargen in
- // non-param names.
- var v int32
- if n.Class_ == ir.PAUTO || (n.Class_ == ir.PAUTOHEAP && n.Name().Stackcopy == nil) {
- v = n.Name().Vargen
- }
-
- w.localIdent(n.Sym(), v)
-}
-
-func (w *exportWriter) localIdent(s *types.Sym, v int32) {
- // Anonymous parameters.
- if s == nil {
- w.string("")
- return
- }
-
- name := s.Name
- if name == "_" {
- w.string("_")
- return
- }
-
- // TODO(mdempsky): Fix autotmp hack.
- if i := strings.LastIndex(name, "."); i >= 0 && !strings.HasPrefix(name, ".autotmp_") {
- base.Fatalf("unexpected dot in identifier: %v", name)
- }
-
- if v > 0 {
- if strings.Contains(name, "·") {
- base.Fatalf("exporter: unexpected · in symbol name")
- }
- name = fmt.Sprintf("%s·%d", name, v)
- }
-
- if !types.IsExported(name) && s.Pkg != w.currPkg {
- base.Fatalf("weird package in name: %v => %v, not %q", s, name, w.currPkg.Path)
- }
-
- w.string(name)
-}
-
-type intWriter struct {
- bytes.Buffer
-}
-
-func (w *intWriter) int64(x int64) {
- var buf [binary.MaxVarintLen64]byte
- n := binary.PutVarint(buf[:], x)
- w.Write(buf[:n])
-}
-
-func (w *intWriter) uint64(x uint64) {
- var buf [binary.MaxVarintLen64]byte
- n := binary.PutUvarint(buf[:], x)
- w.Write(buf[:n])
-}
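
The string section that stringOff builds (above) is a flat byte slice of (uvarint length, bytes) records addressed by byte offset. The round trip below is a sketch of just that framing; appendString is a made-up writer, while stringAt mirrors the importer's stringAt in iimport.go.

package main

import (
	"encoding/binary"
	"fmt"
)

// appendString writes one string-section record and returns its offset.
func appendString(section []byte, s string) ([]byte, uint64) {
	off := uint64(len(section))
	var tmp [binary.MaxVarintLen64]byte
	n := binary.PutUvarint(tmp[:], uint64(len(s)))
	section = append(section, tmp[:n]...)
	section = append(section, s...)
	return section, off
}

// stringAt reads the record starting at off: a uvarint length, then the bytes.
func stringAt(section []byte, off uint64) string {
	slen, n := binary.Uvarint(section[off:])
	start := off + uint64(n)
	return string(section[start : start+slen])
}

func main() {
	var section []byte
	section, off1 := appendString(section, "cmd/compile")
	section, off2 := appendString(section, "typecheck")
	fmt.Println(stringAt(section, off1), stringAt(section, off2)) // cmd/compile typecheck
}
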
diff --git a/src/cmd/compile/internal/gc/iimport.go b/src/cmd/compile/internal/gc/iimport.go
deleted file mode 100644
index e9dc2a3248..0000000000
--- a/src/cmd/compile/internal/gc/iimport.go
+++ /dev/null
@@ -1,1141 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Indexed package import.
-// See iexport.go for the export data format.
-
-package gc
-
-import (
- "cmd/compile/internal/base"
- "cmd/compile/internal/ir"
- "cmd/compile/internal/types"
- "cmd/internal/bio"
- "cmd/internal/goobj"
- "cmd/internal/obj"
- "cmd/internal/src"
- "encoding/binary"
- "fmt"
- "go/constant"
- "io"
- "math/big"
- "os"
- "strings"
-)
-
-// An iimporterAndOffset identifies an importer and an offset within
-// its data section.
-type iimporterAndOffset struct {
- p *iimporter
- off uint64
-}
-
-var (
- // declImporter maps from imported identifiers to an importer
- // and offset where that identifier's declaration can be read.
- declImporter = map[*types.Sym]iimporterAndOffset{}
-
- // inlineImporter is like declImporter, but for inline bodies
- // for function and method symbols.
- inlineImporter = map[*types.Sym]iimporterAndOffset{}
-)
-
-func expandDecl(n ir.Node) ir.Node {
- if n, ok := n.(*ir.Name); ok {
- return n
- }
-
- id := n.(*ir.Ident)
- if n := id.Sym().PkgDef(); n != nil {
- return n.(*ir.Name)
- }
-
- r := importReaderFor(id.Sym(), declImporter)
- if r == nil {
- // Can happen if user tries to reference an undeclared name.
- return n
- }
-
- return r.doDecl(n.Sym())
-}
-
-func expandInline(fn *ir.Func) {
- if fn.Inl.Body != nil {
- return
- }
-
- r := importReaderFor(fn.Nname.Sym(), inlineImporter)
- if r == nil {
- base.Fatalf("missing import reader for %v", fn)
- }
-
- r.doInline(fn)
-}
-
-func importReaderFor(sym *types.Sym, importers map[*types.Sym]iimporterAndOffset) *importReader {
- x, ok := importers[sym]
- if !ok {
- return nil
- }
-
- return x.p.newReader(x.off, sym.Pkg)
-}
-
-type intReader struct {
- *bio.Reader
- pkg *types.Pkg
-}
-
-func (r *intReader) int64() int64 {
- i, err := binary.ReadVarint(r.Reader)
- if err != nil {
- base.Errorf("import %q: read error: %v", r.pkg.Path, err)
- base.ErrorExit()
- }
- return i
-}
-
-func (r *intReader) uint64() uint64 {
- i, err := binary.ReadUvarint(r.Reader)
- if err != nil {
- base.Errorf("import %q: read error: %v", r.pkg.Path, err)
- base.ErrorExit()
- }
- return i
-}
-
-func iimport(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType) {
- ird := &intReader{in, pkg}
-
- version := ird.uint64()
- if version != iexportVersion {
- base.Errorf("import %q: unknown export format version %d", pkg.Path, version)
- base.ErrorExit()
- }
-
- sLen := ird.uint64()
- dLen := ird.uint64()
-
- // Map string (and data) section into memory as a single large
- // string. This reduces heap fragmentation and allows
- // returning individual substrings very efficiently.
- data, err := mapFile(in.File(), in.Offset(), int64(sLen+dLen))
- if err != nil {
- base.Errorf("import %q: mapping input: %v", pkg.Path, err)
- base.ErrorExit()
- }
- stringData := data[:sLen]
- declData := data[sLen:]
-
- in.MustSeek(int64(sLen+dLen), os.SEEK_CUR)
-
- p := &iimporter{
- ipkg: pkg,
-
- pkgCache: map[uint64]*types.Pkg{},
- posBaseCache: map[uint64]*src.PosBase{},
- typCache: map[uint64]*types.Type{},
-
- stringData: stringData,
- declData: declData,
- }
-
- for i, pt := range predeclared() {
- p.typCache[uint64(i)] = pt
- }
-
- // Declaration index.
- for nPkgs := ird.uint64(); nPkgs > 0; nPkgs-- {
- pkg := p.pkgAt(ird.uint64())
- pkgName := p.stringAt(ird.uint64())
- pkgHeight := int(ird.uint64())
- if pkg.Name == "" {
- pkg.Name = pkgName
- pkg.Height = pkgHeight
- types.NumImport[pkgName]++
-
- // TODO(mdempsky): This belongs somewhere else.
- pkg.Lookup("_").Def = ir.BlankNode
- } else {
- if pkg.Name != pkgName {
- base.Fatalf("conflicting package names %v and %v for path %q", pkg.Name, pkgName, pkg.Path)
- }
- if pkg.Height != pkgHeight {
- base.Fatalf("conflicting package heights %v and %v for path %q", pkg.Height, pkgHeight, pkg.Path)
- }
- }
-
- for nSyms := ird.uint64(); nSyms > 0; nSyms-- {
- s := pkg.Lookup(p.stringAt(ird.uint64()))
- off := ird.uint64()
-
- if _, ok := declImporter[s]; !ok {
- declImporter[s] = iimporterAndOffset{p, off}
- }
- }
- }
-
- // Inline body index.
- for nPkgs := ird.uint64(); nPkgs > 0; nPkgs-- {
- pkg := p.pkgAt(ird.uint64())
-
- for nSyms := ird.uint64(); nSyms > 0; nSyms-- {
- s := pkg.Lookup(p.stringAt(ird.uint64()))
- off := ird.uint64()
-
- if _, ok := inlineImporter[s]; !ok {
- inlineImporter[s] = iimporterAndOffset{p, off}
- }
- }
- }
-
- // Fingerprint.
- _, err = io.ReadFull(in, fingerprint[:])
- if err != nil {
- base.Errorf("import %s: error reading fingerprint", pkg.Path)
- base.ErrorExit()
- }
- return fingerprint
-}
-
-type iimporter struct {
- ipkg *types.Pkg
-
- pkgCache map[uint64]*types.Pkg
- posBaseCache map[uint64]*src.PosBase
- typCache map[uint64]*types.Type
-
- stringData string
- declData string
-}
-
-func (p *iimporter) stringAt(off uint64) string {
- var x [binary.MaxVarintLen64]byte
- n := copy(x[:], p.stringData[off:])
-
- slen, n := binary.Uvarint(x[:n])
- if n <= 0 {
- base.Fatalf("varint failed")
- }
- spos := off + uint64(n)
- return p.stringData[spos : spos+slen]
-}
-
-func (p *iimporter) posBaseAt(off uint64) *src.PosBase {
- if posBase, ok := p.posBaseCache[off]; ok {
- return posBase
- }
-
- file := p.stringAt(off)
- posBase := src.NewFileBase(file, file)
- p.posBaseCache[off] = posBase
- return posBase
-}
-
-func (p *iimporter) pkgAt(off uint64) *types.Pkg {
- if pkg, ok := p.pkgCache[off]; ok {
- return pkg
- }
-
- pkg := p.ipkg
- if pkgPath := p.stringAt(off); pkgPath != "" {
- pkg = types.NewPkg(pkgPath, "")
- }
- p.pkgCache[off] = pkg
- return pkg
-}
-
-// An importReader keeps state for reading an individual imported
-// object (declaration or inline body).
-type importReader struct {
- strings.Reader
- p *iimporter
-
- currPkg *types.Pkg
- prevBase *src.PosBase
- prevLine int64
- prevColumn int64
-}
-
-func (p *iimporter) newReader(off uint64, pkg *types.Pkg) *importReader {
- r := &importReader{
- p: p,
- currPkg: pkg,
- }
- // (*strings.Reader).Reset wasn't added until Go 1.7, and we
- // need to build with Go 1.4.
- r.Reader = *strings.NewReader(p.declData[off:])
- return r
-}
-
-func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
-func (r *importReader) posBase() *src.PosBase { return r.p.posBaseAt(r.uint64()) }
-func (r *importReader) pkg() *types.Pkg { return r.p.pkgAt(r.uint64()) }
-
-func (r *importReader) setPkg() {
- r.currPkg = r.pkg()
-}
-
-func (r *importReader) doDecl(sym *types.Sym) *ir.Name {
- tag := r.byte()
- pos := r.pos()
-
- switch tag {
- case 'A':
- typ := r.typ()
-
- return importalias(r.p.ipkg, pos, sym, typ)
-
- case 'C':
- typ := r.typ()
- val := r.value(typ)
-
- return importconst(r.p.ipkg, pos, sym, typ, val)
-
- case 'F':
- typ := r.signature(nil)
-
- n := importfunc(r.p.ipkg, pos, sym, typ)
- r.funcExt(n)
- return n
-
- case 'T':
- // Types can be recursive. We need to set up a stub
- // declaration before recursing.
- n := importtype(r.p.ipkg, pos, sym)
- t := n.Type()
-
- // We also need to defer width calculations until
- // after the underlying type has been assigned.
- types.DeferCheckSize()
- underlying := r.typ()
- t.SetUnderlying(underlying)
- types.ResumeCheckSize()
-
- if underlying.IsInterface() {
- r.typeExt(t)
- return n
- }
-
- ms := make([]*types.Field, r.uint64())
- for i := range ms {
- mpos := r.pos()
- msym := r.ident()
- recv := r.param()
- mtyp := r.signature(recv)
-
- fn := ir.NewFunc(mpos)
- fn.SetType(mtyp)
- m := ir.NewFuncNameAt(mpos, ir.MethodSym(recv.Type, msym), fn)
- m.SetType(mtyp)
- m.Class_ = ir.PFUNC
- // methodSym already marked m.Sym as a function.
-
- f := types.NewField(mpos, msym, mtyp)
- f.Nname = m
- ms[i] = f
- }
- t.Methods().Set(ms)
-
- r.typeExt(t)
- for _, m := range ms {
- r.methExt(m)
- }
- return n
-
- case 'V':
- typ := r.typ()
-
- n := importvar(r.p.ipkg, pos, sym, typ)
- r.varExt(n)
- return n
-
- default:
- base.Fatalf("unexpected tag: %v", tag)
- panic("unreachable")
- }
-}
-
-func (p *importReader) value(typ *types.Type) constant.Value {
- switch constTypeOf(typ) {
- case constant.Bool:
- return constant.MakeBool(p.bool())
- case constant.String:
- return constant.MakeString(p.string())
- case constant.Int:
- var i big.Int
- p.mpint(&i, typ)
- return makeInt(&i)
- case constant.Float:
- return p.float(typ)
- case constant.Complex:
- return makeComplex(p.float(typ), p.float(typ))
- }
-
- base.Fatalf("unexpected value type: %v", typ)
- panic("unreachable")
-}
-
-func (p *importReader) mpint(x *big.Int, typ *types.Type) {
- signed, maxBytes := intSize(typ)
-
- maxSmall := 256 - maxBytes
- if signed {
- maxSmall = 256 - 2*maxBytes
- }
- if maxBytes == 1 {
- maxSmall = 256
- }
-
- n, _ := p.ReadByte()
- if uint(n) < maxSmall {
- v := int64(n)
- if signed {
- v >>= 1
- if n&1 != 0 {
- v = ^v
- }
- }
- x.SetInt64(v)
- return
- }
-
- v := -n
- if signed {
- v = -(n &^ 1) >> 1
- }
- if v < 1 || uint(v) > maxBytes {
- base.Fatalf("weird decoding: %v, %v => %v", n, signed, v)
- }
- b := make([]byte, v)
- p.Read(b)
- x.SetBytes(b)
- if signed && n&1 != 0 {
- x.Neg(x)
- }
-}
-
-func (p *importReader) float(typ *types.Type) constant.Value {
- var mant big.Int
- p.mpint(&mant, typ)
- var f big.Float
- f.SetInt(&mant)
- if f.Sign() != 0 {
- f.SetMantExp(&f, int(p.int64()))
- }
- return constant.Make(&f)
-}
-
-func (r *importReader) ident() *types.Sym {
- name := r.string()
- if name == "" {
- return nil
- }
- pkg := r.currPkg
- if types.IsExported(name) {
- pkg = types.LocalPkg
- }
- return pkg.Lookup(name)
-}
-
-func (r *importReader) qualifiedIdent() *ir.Ident {
- name := r.string()
- pkg := r.pkg()
- sym := pkg.Lookup(name)
- return ir.NewIdent(src.NoXPos, sym)
-}
-
-func (r *importReader) pos() src.XPos {
- delta := r.int64()
- r.prevColumn += delta >> 1
- if delta&1 != 0 {
- delta = r.int64()
- r.prevLine += delta >> 1
- if delta&1 != 0 {
- r.prevBase = r.posBase()
- }
- }
-
- if (r.prevBase == nil || r.prevBase.AbsFilename() == "") && r.prevLine == 0 && r.prevColumn == 0 {
- // TODO(mdempsky): Remove once we reliably write
- // position information for all nodes.
- return src.NoXPos
- }
-
- if r.prevBase == nil {
- base.Fatalf("missing posbase")
- }
- pos := src.MakePos(r.prevBase, uint(r.prevLine), uint(r.prevColumn))
- return base.Ctxt.PosTable.XPos(pos)
-}
-
-func (r *importReader) typ() *types.Type {
- return r.p.typAt(r.uint64())
-}
-
-func (p *iimporter) typAt(off uint64) *types.Type {
- t, ok := p.typCache[off]
- if !ok {
- if off < predeclReserved {
- base.Fatalf("predeclared type missing from cache: %d", off)
- }
- t = p.newReader(off-predeclReserved, nil).typ1()
- p.typCache[off] = t
- }
- return t
-}
-
-func (r *importReader) typ1() *types.Type {
- switch k := r.kind(); k {
- default:
- base.Fatalf("unexpected kind tag in %q: %v", r.p.ipkg.Path, k)
- return nil
-
- case definedType:
- // We might be called from within doInline, in which
- // case Sym.Def can point to declared parameters
- // instead of the top-level types. Also, we don't
- // support inlining functions with local defined
- // types. Therefore, this must be a package-scope
- // type.
- n := expandDecl(r.qualifiedIdent())
- if n.Op() != ir.OTYPE {
- base.Fatalf("expected OTYPE, got %v: %v, %v", n.Op(), n.Sym(), n)
- }
- return n.Type()
- case pointerType:
- return types.NewPtr(r.typ())
- case sliceType:
- return types.NewSlice(r.typ())
- case arrayType:
- n := r.uint64()
- return types.NewArray(r.typ(), int64(n))
- case chanType:
- dir := types.ChanDir(r.uint64())
- return types.NewChan(r.typ(), dir)
- case mapType:
- return types.NewMap(r.typ(), r.typ())
-
- case signatureType:
- r.setPkg()
- return r.signature(nil)
-
- case structType:
- r.setPkg()
-
- fs := make([]*types.Field, r.uint64())
- for i := range fs {
- pos := r.pos()
- sym := r.ident()
- typ := r.typ()
- emb := r.bool()
- note := r.string()
-
- f := types.NewField(pos, sym, typ)
- if emb {
- f.Embedded = 1
- }
- f.Note = note
- fs[i] = f
- }
-
- return types.NewStruct(r.currPkg, fs)
-
- case interfaceType:
- r.setPkg()
-
- embeddeds := make([]*types.Field, r.uint64())
- for i := range embeddeds {
- pos := r.pos()
- typ := r.typ()
-
- embeddeds[i] = types.NewField(pos, nil, typ)
- }
-
- methods := make([]*types.Field, r.uint64())
- for i := range methods {
- pos := r.pos()
- sym := r.ident()
- typ := r.signature(fakeRecvField())
-
- methods[i] = types.NewField(pos, sym, typ)
- }
-
- t := types.NewInterface(r.currPkg, append(embeddeds, methods...))
-
- // Ensure we expand the interface in the frontend (#25055).
- types.CheckSize(t)
- return t
- }
-}
-
-func (r *importReader) kind() itag {
- return itag(r.uint64())
-}
-
-func (r *importReader) signature(recv *types.Field) *types.Type {
- params := r.paramList()
- results := r.paramList()
- if n := len(params); n > 0 {
- params[n-1].SetIsDDD(r.bool())
- }
- return types.NewSignature(r.currPkg, recv, params, results)
-}
-
-func (r *importReader) paramList() []*types.Field {
- fs := make([]*types.Field, r.uint64())
- for i := range fs {
- fs[i] = r.param()
- }
- return fs
-}
-
-func (r *importReader) param() *types.Field {
- return types.NewField(r.pos(), r.ident(), r.typ())
-}
-
-func (r *importReader) bool() bool {
- return r.uint64() != 0
-}
-
-func (r *importReader) int64() int64 {
- n, err := binary.ReadVarint(r)
- if err != nil {
- base.Fatalf("readVarint: %v", err)
- }
- return n
-}
-
-func (r *importReader) uint64() uint64 {
- n, err := binary.ReadUvarint(r)
- if err != nil {
- base.Fatalf("readVarint: %v", err)
- }
- return n
-}
-
-func (r *importReader) byte() byte {
- x, err := r.ReadByte()
- if err != nil {
- base.Fatalf("declReader.ReadByte: %v", err)
- }
- return x
-}
-
-// Compiler-specific extensions.
-
-func (r *importReader) varExt(n ir.Node) {
- r.linkname(n.Sym())
- r.symIdx(n.Sym())
-}
-
-func (r *importReader) funcExt(n *ir.Name) {
- r.linkname(n.Sym())
- r.symIdx(n.Sym())
-
- // Escape analysis.
- for _, fs := range &types.RecvsParams {
- for _, f := range fs(n.Type()).FieldSlice() {
- f.Note = r.string()
- }
- }
-
- // Inline body.
- if u := r.uint64(); u > 0 {
- n.Func.Inl = &ir.Inline{
- Cost: int32(u - 1),
- }
- n.Func.Endlineno = r.pos()
- }
-}
-
-func (r *importReader) methExt(m *types.Field) {
- if r.bool() {
- m.SetNointerface(true)
- }
- r.funcExt(m.Nname.(*ir.Name))
-}
-
-func (r *importReader) linkname(s *types.Sym) {
- s.Linkname = r.string()
-}
-
-func (r *importReader) symIdx(s *types.Sym) {
- lsym := s.Linksym()
- idx := int32(r.int64())
- if idx != -1 {
- if s.Linkname != "" {
- base.Fatalf("bad index for linknamed symbol: %v %d\n", lsym, idx)
- }
- lsym.SymIdx = idx
- lsym.Set(obj.AttrIndexed, true)
- }
-}
-
-func (r *importReader) typeExt(t *types.Type) {
- t.SetNotInHeap(r.bool())
- i, pi := r.int64(), r.int64()
- if i != -1 && pi != -1 {
- typeSymIdx[t] = [2]int64{i, pi}
- }
-}
-
-// Map imported type T to the index of type descriptor symbols of T and *T,
-// so we can use the index to reference the symbol.
-var typeSymIdx = make(map[*types.Type][2]int64)
-
-func BaseTypeIndex(t *types.Type) int64 {
- tbase := t
- if t.IsPtr() && t.Sym() == nil && t.Elem().Sym() != nil {
- tbase = t.Elem()
- }
- i, ok := typeSymIdx[tbase]
- if !ok {
- return -1
- }
- if t != tbase {
- return i[1]
- }
- return i[0]
-}
-
-func (r *importReader) doInline(fn *ir.Func) {
- if len(fn.Inl.Body) != 0 {
- base.Fatalf("%v already has inline body", fn)
- }
-
- funchdr(fn)
- body := r.stmtList()
- funcbody()
- if body == nil {
- // Make sure an empty body is not interpreted as
- // having no inlineable body (see also parser.fnbody);
- // not doing so can cause significant performance
- // degradation due to unnecessary calls to empty
- // functions.
- body = []ir.Node{}
- }
- fn.Inl.Body = body
-
- importlist = append(importlist, fn)
-
- if base.Flag.E > 0 && base.Flag.LowerM > 2 {
- if base.Flag.LowerM > 3 {
- fmt.Printf("inl body for %v %v: %+v\n", fn, fn.Type(), ir.Nodes(fn.Inl.Body))
- } else {
- fmt.Printf("inl body for %v %v: %v\n", fn, fn.Type(), ir.Nodes(fn.Inl.Body))
- }
- }
-}
-
-// ----------------------------------------------------------------------------
-// Inlined function bodies
-
-// Approach: Read nodes and use them to create/declare the same data structures
-// as done originally by the (hidden) parser by closely following the parser's
-// original code. In other words, "parsing" the import data (which happens to
-// be encoded in binary rather than textual form) is the best way at the moment to
-// re-establish the syntax tree's invariants. At some future point we might be
-// able to avoid this round-about way and create the rewritten nodes directly,
-// possibly avoiding a lot of duplicate work (name resolution, type checking).
-//
-// Refined nodes (e.g., ODOTPTR as a refinement of OXDOT) are exported as their
-// unrefined nodes (since this is what the importer uses). The respective case
-// entries are unreachable in the importer.
-
-func (r *importReader) stmtList() []ir.Node {
- var list []ir.Node
- for {
- n := r.node()
- if n == nil {
- break
- }
- // OBLOCK nodes are not written to the import data directly,
- // but the handling of ODCL calls liststmt, which creates one.
- // Inline them into the statement list.
- if n.Op() == ir.OBLOCK {
- n := n.(*ir.BlockStmt)
- list = append(list, n.List...)
- } else {
- list = append(list, n)
- }
-
- }
- return list
-}
-
-func (r *importReader) caseList(sw ir.Node) []ir.Node {
- namedTypeSwitch := isNamedTypeSwitch(sw)
-
- cases := make([]ir.Node, r.uint64())
- for i := range cases {
- cas := ir.NewCaseStmt(r.pos(), nil, nil)
- cas.List.Set(r.stmtList())
- if namedTypeSwitch {
- // Note: per-case variables will have distinct, dotted
- // names after import. That's okay: swt.go only needs
- // Sym for diagnostics anyway.
- caseVar := ir.NewNameAt(cas.Pos(), r.ident())
- declare(caseVar, dclcontext)
- cas.Vars = []ir.Node{caseVar}
- caseVar.Defn = sw.(*ir.SwitchStmt).Tag
- }
- cas.Body.Set(r.stmtList())
- cases[i] = cas
- }
- return cases
-}
-
-func (r *importReader) exprList() []ir.Node {
- var list []ir.Node
- for {
- n := r.expr()
- if n == nil {
- break
- }
- list = append(list, n)
- }
- return list
-}
-
-func (r *importReader) expr() ir.Node {
- n := r.node()
- if n != nil && n.Op() == ir.OBLOCK {
- n := n.(*ir.BlockStmt)
- base.Fatalf("unexpected block node: %v", n)
- }
- return n
-}
-
-// TODO(gri) split into expr and stmt
-func (r *importReader) node() ir.Node {
- switch op := r.op(); op {
- // expressions
- // case OPAREN:
- // unreachable - unpacked by exporter
-
- case ir.ONIL:
- pos := r.pos()
- typ := r.typ()
-
- n := npos(pos, nodnil())
- n.SetType(typ)
- return n
-
- case ir.OLITERAL:
- pos := r.pos()
- typ := r.typ()
-
- n := npos(pos, ir.NewLiteral(r.value(typ)))
- n.SetType(typ)
- return n
-
- case ir.ONONAME:
- return r.qualifiedIdent()
-
- case ir.ONAME:
- return r.ident().Def.(*ir.Name)
-
- // case OPACK, ONONAME:
- // unreachable - should have been resolved by typechecking
-
- case ir.OTYPE:
- return ir.TypeNode(r.typ())
-
- case ir.OTYPESW:
- pos := r.pos()
- var tag *ir.Ident
- if s := r.ident(); s != nil {
- tag = ir.NewIdent(pos, s)
- }
- expr, _ := r.exprsOrNil()
- return ir.NewTypeSwitchGuard(pos, tag, expr)
-
- // case OTARRAY, OTMAP, OTCHAN, OTSTRUCT, OTINTER, OTFUNC:
- // unreachable - should have been resolved by typechecking
-
- // case OCLOSURE:
- // unimplemented
-
- // case OPTRLIT:
- // unreachable - mapped to case OADDR below by exporter
-
- case ir.OSTRUCTLIT:
- // TODO(mdempsky): Export position information for OSTRUCTKEY nodes.
- savedlineno := base.Pos
- base.Pos = r.pos()
- n := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(r.typ()).(ir.Ntype), nil)
- n.List.Set(r.elemList()) // special handling of field names
- base.Pos = savedlineno
- return n
-
- // case OARRAYLIT, OSLICELIT, OMAPLIT:
- // unreachable - mapped to case OCOMPLIT below by exporter
-
- case ir.OCOMPLIT:
- n := ir.NewCompLitExpr(r.pos(), ir.OCOMPLIT, ir.TypeNode(r.typ()).(ir.Ntype), nil)
- n.List.Set(r.exprList())
- return n
-
- case ir.OKEY:
- pos := r.pos()
- left, right := r.exprsOrNil()
- return ir.NewKeyExpr(pos, left, right)
-
- // case OSTRUCTKEY:
- // unreachable - handled in case OSTRUCTLIT by elemList
-
- // case OCALLPART:
- // unreachable - mapped to case OXDOT below by exporter
-
- // case OXDOT, ODOT, ODOTPTR, ODOTINTER, ODOTMETH:
- // unreachable - mapped to case OXDOT below by exporter
-
- case ir.OXDOT:
- // see parser.new_dotname
- return ir.NewSelectorExpr(r.pos(), ir.OXDOT, r.expr(), r.ident())
-
- // case ODOTTYPE, ODOTTYPE2:
- // unreachable - mapped to case ODOTTYPE below by exporter
-
- case ir.ODOTTYPE:
- n := ir.NewTypeAssertExpr(r.pos(), r.expr(), nil)
- n.SetType(r.typ())
- return n
-
- // case OINDEX, OINDEXMAP, OSLICE, OSLICESTR, OSLICEARR, OSLICE3, OSLICE3ARR:
- // unreachable - mapped to cases below by exporter
-
- case ir.OINDEX:
- return ir.NewIndexExpr(r.pos(), r.expr(), r.expr())
-
- case ir.OSLICE, ir.OSLICE3:
- n := ir.NewSliceExpr(r.pos(), op, r.expr())
- low, high := r.exprsOrNil()
- var max ir.Node
- if n.Op().IsSlice3() {
- max = r.expr()
- }
- n.SetSliceBounds(low, high, max)
- return n
-
- // case OCONV, OCONVIFACE, OCONVNOP, OBYTES2STR, ORUNES2STR, OSTR2BYTES, OSTR2RUNES, ORUNESTR:
- // unreachable - mapped to OCONV case below by exporter
-
- case ir.OCONV:
- n := ir.NewConvExpr(r.pos(), ir.OCONV, nil, r.expr())
- n.SetType(r.typ())
- return n
-
- case ir.OCOPY, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCAP, ir.OCLOSE, ir.ODELETE, ir.OLEN, ir.OMAKE, ir.ONEW, ir.OPANIC, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
- n := builtinCall(r.pos(), op)
- n.Args.Set(r.exprList())
- if op == ir.OAPPEND {
- n.IsDDD = r.bool()
- }
- return n
-
- // case OCALLFUNC, OCALLMETH, OCALLINTER, OGETG:
- // unreachable - mapped to OCALL case below by exporter
-
- case ir.OCALL:
- n := ir.NewCallExpr(r.pos(), ir.OCALL, nil, nil)
- n.PtrInit().Set(r.stmtList())
- n.X = r.expr()
- n.Args.Set(r.exprList())
- n.IsDDD = r.bool()
- return n
-
- case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
- n := builtinCall(r.pos(), ir.OMAKE)
- n.Args.Append(ir.TypeNode(r.typ()))
- n.Args.Append(r.exprList()...)
- return n
-
- // unary expressions
- case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.ORECV:
- return ir.NewUnaryExpr(r.pos(), op, r.expr())
-
- case ir.OADDR:
- return nodAddrAt(r.pos(), r.expr())
-
- case ir.ODEREF:
- return ir.NewStarExpr(r.pos(), r.expr())
-
- // binary expressions
- case ir.OADD, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT,
- ir.OLSH, ir.OMOD, ir.OMUL, ir.ONE, ir.OOR, ir.ORSH, ir.OSUB, ir.OXOR:
- return ir.NewBinaryExpr(r.pos(), op, r.expr(), r.expr())
-
- case ir.OANDAND, ir.OOROR:
- return ir.NewLogicalExpr(r.pos(), op, r.expr(), r.expr())
-
- case ir.OSEND:
- return ir.NewSendStmt(r.pos(), r.expr(), r.expr())
-
- case ir.OADDSTR:
- pos := r.pos()
- list := r.exprList()
- x := npos(pos, list[0])
- for _, y := range list[1:] {
- x = ir.NewBinaryExpr(pos, ir.OADD, x, y)
- }
- return x
-
- // --------------------------------------------------------------------
- // statements
- case ir.ODCL:
- pos := r.pos()
- lhs := ir.NewDeclNameAt(pos, ir.ONAME, r.ident())
- lhs.SetType(r.typ())
-
- declare(lhs, ir.PAUTO)
-
- var stmts ir.Nodes
- stmts.Append(ir.NewDecl(base.Pos, ir.ODCL, lhs))
- stmts.Append(ir.NewAssignStmt(base.Pos, lhs, nil))
- return ir.NewBlockStmt(pos, stmts)
-
- // case OAS, OASWB:
- // unreachable - mapped to OAS case below by exporter
-
- case ir.OAS:
- return ir.NewAssignStmt(r.pos(), r.expr(), r.expr())
-
- case ir.OASOP:
- n := ir.NewAssignOpStmt(r.pos(), ir.OXXX, nil, nil)
- n.AsOp = r.op()
- n.X = r.expr()
- if !r.bool() {
- n.Y = ir.NewInt(1)
- n.IncDec = true
- } else {
- n.Y = r.expr()
- }
- return n
-
- // case OAS2DOTTYPE, OAS2FUNC, OAS2MAPR, OAS2RECV:
- // unreachable - mapped to OAS2 case below by exporter
-
- case ir.OAS2:
- n := ir.NewAssignListStmt(r.pos(), ir.OAS2, nil, nil)
- n.Lhs.Set(r.exprList())
- n.Rhs.Set(r.exprList())
- return n
-
- case ir.ORETURN:
- n := ir.NewReturnStmt(r.pos(), nil)
- n.Results.Set(r.exprList())
- return n
-
- // case ORETJMP:
- // unreachable - generated by compiler for trampoline routines (not exported)
-
- case ir.OGO, ir.ODEFER:
- return ir.NewGoDeferStmt(r.pos(), op, r.expr())
-
- case ir.OIF:
- n := ir.NewIfStmt(r.pos(), nil, nil, nil)
- n.PtrInit().Set(r.stmtList())
- n.Cond = r.expr()
- n.Body.Set(r.stmtList())
- n.Else.Set(r.stmtList())
- return n
-
- case ir.OFOR:
- n := ir.NewForStmt(r.pos(), nil, nil, nil, nil)
- n.PtrInit().Set(r.stmtList())
- left, right := r.exprsOrNil()
- n.Cond = left
- n.Post = right
- n.Body.Set(r.stmtList())
- return n
-
- case ir.ORANGE:
- n := ir.NewRangeStmt(r.pos(), nil, nil, nil)
- n.Vars.Set(r.stmtList())
- n.X = r.expr()
- n.Body.Set(r.stmtList())
- return n
-
- case ir.OSELECT:
- n := ir.NewSelectStmt(r.pos(), nil)
- n.PtrInit().Set(r.stmtList())
- r.exprsOrNil() // TODO(rsc): Delete (and fix exporter). These are always nil.
- n.Cases.Set(r.caseList(n))
- return n
-
- case ir.OSWITCH:
- n := ir.NewSwitchStmt(r.pos(), nil, nil)
- n.PtrInit().Set(r.stmtList())
- left, _ := r.exprsOrNil()
- n.Tag = left
- n.Cases.Set(r.caseList(n))
- return n
-
- // case OCASE:
- // handled by caseList
-
- case ir.OFALL:
- n := ir.NewBranchStmt(r.pos(), ir.OFALL, nil)
- return n
-
- // case OEMPTY:
- // unreachable - not emitted by exporter
-
- case ir.OBREAK, ir.OCONTINUE, ir.OGOTO:
- var sym *types.Sym
- pos := r.pos()
- if label := r.string(); label != "" {
- sym = lookup(label)
- }
- return ir.NewBranchStmt(pos, op, sym)
-
- case ir.OLABEL:
- return ir.NewLabelStmt(r.pos(), lookup(r.string()))
-
- case ir.OEND:
- return nil
-
- default:
- base.Fatalf("cannot import %v (%d) node\n"+
- "\t==> please file an issue and assign to gri@", op, int(op))
- panic("unreachable") // satisfy compiler
- }
-}
-
-func (r *importReader) op() ir.Op {
- return ir.Op(r.uint64())
-}
-
-func (r *importReader) elemList() []ir.Node {
- c := r.uint64()
- list := make([]ir.Node, c)
- for i := range list {
- s := r.ident()
- list[i] = ir.NewStructKeyExpr(base.Pos, s, r.expr())
- }
- return list
-}
-
-func (r *importReader) exprsOrNil() (a, b ir.Node) {
- ab := r.uint64()
- if ab&1 != 0 {
- a = r.expr()
- }
- if ab&2 != 0 {
- b = r.node()
- }
- return
-}
-
-func builtinCall(pos src.XPos, op ir.Op) *ir.CallExpr {
- return ir.NewCallExpr(pos, ir.OCALL, ir.NewIdent(base.Pos, types.BuiltinPkg.Lookup(ir.OpNames[op])), nil)
-}
-
-func npos(pos src.XPos, n ir.Node) ir.Node {
- n.SetPos(pos)
- return n
-}
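As background for the importer code removed above: the string section that stringAt reads stores each entry as a uvarint length followed by the raw bytes, and entries are addressed by byte offset. A minimal standalone sketch of that layout, illustrative only and not the compiler's code:

package main

import (
	"encoding/binary"
	"fmt"
)

// readString mirrors stringAt: decode the uvarint length stored at off,
// then slice out that many bytes immediately after it.
func readString(data string, off uint64) string {
	slen, n := binary.Uvarint([]byte(data[off:]))
	if n <= 0 {
		panic("varint failed")
	}
	start := off + uint64(n)
	return data[start : start+slen]
}

func main() {
	// Build a tiny string section and remember each entry's offset.
	var buf []byte
	var offs []uint64
	for _, s := range []string{"fmt", "Println", "hello, world"} {
		offs = append(offs, uint64(len(buf)))
		var tmp [binary.MaxVarintLen64]byte
		n := binary.PutUvarint(tmp[:], uint64(len(s)))
		buf = append(buf, tmp[:n]...)
		buf = append(buf, s...)
	}
	for _, off := range offs {
		fmt.Println(readString(string(buf), off))
	}
}

The real stringAt avoids copying the tail of the section by decoding at most binary.MaxVarintLen64 bytes, but the layout it assumes is the same.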
diff --git a/src/cmd/compile/internal/gc/init.go b/src/cmd/compile/internal/gc/init.go
index f22e49efba..ed61c11522 100644
--- a/src/cmd/compile/internal/gc/init.go
+++ b/src/cmd/compile/internal/gc/init.go
@@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
)
@@ -17,12 +18,8 @@ import (
// the name, normally "pkg.init", is altered to "pkg.init.0".
var renameinitgen int
-// Function collecting autotmps generated during typechecking,
-// to be included in the package-level init function.
-var initTodo = ir.NewFunc(base.Pos)
-
func renameinit() *types.Sym {
- s := lookupN("init.", renameinitgen)
+ s := typecheck.LookupNum("init.", renameinitgen)
renameinitgen++
return s
}
@@ -34,14 +31,14 @@ func renameinit() *types.Sym {
// 2) Initialize all the variables that have initializers.
// 3) Run any init functions.
func fninit() *ir.Name {
- nf := initOrder(Target.Decls)
+ nf := initOrder(typecheck.Target.Decls)
var deps []*obj.LSym // initTask records for packages the current package depends on
var fns []*obj.LSym // functions to call for package initialization
// Find imported packages with init tasks.
- for _, pkg := range Target.Imports {
- n := resolve(ir.NewIdent(base.Pos, pkg.Lookup(".inittask")))
+ for _, pkg := range typecheck.Target.Imports {
+ n := typecheck.Resolve(ir.NewIdent(base.Pos, pkg.Lookup(".inittask")))
if n.Op() == ir.ONONAME {
continue
}
@@ -54,34 +51,34 @@ func fninit() *ir.Name {
// Make a function that contains all the initialization statements.
if len(nf) > 0 {
base.Pos = nf[0].Pos() // prolog/epilog gets line number of first init stmt
- initializers := lookup("init")
- fn := dclfunc(initializers, ir.NewFuncType(base.Pos, nil, nil, nil))
- for _, dcl := range initTodo.Dcl {
+ initializers := typecheck.Lookup("init")
+ fn := typecheck.DeclFunc(initializers, ir.NewFuncType(base.Pos, nil, nil, nil))
+ for _, dcl := range typecheck.InitTodoFunc.Dcl {
dcl.Curfn = fn
}
- fn.Dcl = append(fn.Dcl, initTodo.Dcl...)
- initTodo.Dcl = nil
+ fn.Dcl = append(fn.Dcl, typecheck.InitTodoFunc.Dcl...)
+ typecheck.InitTodoFunc.Dcl = nil
fn.Body.Set(nf)
- funcbody()
+ typecheck.FinishFuncBody()
- typecheckFunc(fn)
+ typecheck.Func(fn)
ir.CurFunc = fn
- typecheckslice(nf, ctxStmt)
+ typecheck.Stmts(nf)
ir.CurFunc = nil
- Target.Decls = append(Target.Decls, fn)
+ typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
fns = append(fns, initializers.Linksym())
}
- if initTodo.Dcl != nil {
+ if typecheck.InitTodoFunc.Dcl != nil {
// We only generate temps using initTodo if there
// are package-scope initialization statements, so
// something's weird if we get here.
base.Fatalf("initTodo still has declarations")
}
- initTodo = nil
+ typecheck.InitTodoFunc = nil
// Record user init functions.
- for _, fn := range Target.Inits {
+ for _, fn := range typecheck.Target.Inits {
// Skip init functions with empty bodies.
if len(fn.Body) == 1 {
if stmt := fn.Body[0]; stmt.Op() == ir.OBLOCK && len(stmt.(*ir.BlockStmt).List) == 0 {
@@ -96,8 +93,8 @@ func fninit() *ir.Name {
}
// Make an .inittask structure.
- sym := lookup(".inittask")
- task := NewName(sym)
+ sym := typecheck.Lookup(".inittask")
+ task := typecheck.NewName(sym)
task.SetType(types.Types[types.TUINT8]) // fake type
task.Class_ = ir.PEXTERN
sym.Def = task
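The renameinit hunk above keeps the existing naming scheme, now produced through typecheck.LookupNum: successive user init functions become init.0, init.1, and so on. A trivial sketch of that numbering follows; the real helper interns a *types.Sym rather than returning a string.

package main

import "fmt"

// lookupNum imitates only the name construction done by typecheck.LookupNum.
func lookupNum(prefix string, n int) string {
	return fmt.Sprintf("%s%d", prefix, n)
}

func main() {
	for gen := 0; gen < 3; gen++ {
		fmt.Println(lookupNum("init.", gen)) // init.0, init.1, init.2
	}
}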
diff --git a/src/cmd/compile/internal/gc/inl.go b/src/cmd/compile/internal/gc/inl.go
index b9e19da43f..9cf23caf0e 100644
--- a/src/cmd/compile/internal/gc/inl.go
+++ b/src/cmd/compile/internal/gc/inl.go
@@ -30,6 +30,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
@@ -54,7 +55,7 @@ const (
func InlinePackage() {
// Find functions that can be inlined and clone them before walk expands them.
- ir.VisitFuncsBottomUp(Target.Decls, func(list []*ir.Func, recursive bool) {
+ ir.VisitFuncsBottomUp(typecheck.Target.Decls, func(list []*ir.Func, recursive bool) {
numfns := numNonClosures(list)
for _, n := range list {
if !recursive || numfns > 1 {
@@ -72,63 +73,6 @@ func InlinePackage() {
})
}
-// Get the function's package. For ordinary functions it's on the ->sym, but for imported methods
-// the ->sym can be re-used in the local package, so peel it off the receiver's type.
-func fnpkg(fn *ir.Name) *types.Pkg {
- if ir.IsMethod(fn) {
- // method
- rcvr := fn.Type().Recv().Type
-
- if rcvr.IsPtr() {
- rcvr = rcvr.Elem()
- }
- if rcvr.Sym() == nil {
- base.Fatalf("receiver with no sym: [%v] %L (%v)", fn.Sym(), fn, rcvr)
- }
- return rcvr.Sym().Pkg
- }
-
- // non-method
- return fn.Sym().Pkg
-}
-
-// Lazy typechecking of imported bodies. For local functions, caninl will set ->typecheck
-// because they're a copy of an already checked body.
-func typecheckinl(fn *ir.Func) {
- lno := ir.SetPos(fn.Nname)
-
- expandInline(fn)
-
- // typecheckinl is only for imported functions;
- // their bodies may refer to unsafe as long as the package
- // was marked safe during import (which was checked then).
- // the ->inl of a local function has been typechecked before caninl copied it.
- pkg := fnpkg(fn.Nname)
-
- if pkg == types.LocalPkg || pkg == nil {
- return // typecheckinl on local function
- }
-
- if base.Flag.LowerM > 2 || base.Debug.Export != 0 {
- fmt.Printf("typecheck import [%v] %L { %v }\n", fn.Sym(), fn, ir.Nodes(fn.Inl.Body))
- }
-
- savefn := ir.CurFunc
- ir.CurFunc = fn
- typecheckslice(fn.Inl.Body, ctxStmt)
- ir.CurFunc = savefn
-
- // During expandInline (which imports fn.Func.Inl.Body),
- // declarations are added to fn.Func.Dcl by funcHdr(). Move them
- // to fn.Func.Inl.Dcl for consistency with how local functions
- // behave. (Append because typecheckinl may be called multiple
- // times.)
- fn.Inl.Dcl = append(fn.Inl.Dcl, fn.Dcl...)
- fn.Dcl = nil
-
- base.Pos = lno
-}
-
// Caninl determines whether fn is inlineable.
// If so, caninl saves fn->nbody in fn->inl and substitutes it with a copy.
// fn and ->nbody will already have been typechecked.
@@ -270,7 +214,7 @@ func inlFlood(n *ir.Name, exportsym func(*ir.Name)) {
}
fn.SetExportInline(true)
- typecheckinl(fn)
+ typecheck.ImportedBody(fn)
// Recursively identify all referenced functions for
// reexport. We want to include even non-called functions,
@@ -601,7 +545,7 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
as.Rhs.Set(inlconv2list(as.Rhs[0].(*ir.InlinedCallExpr)))
as.SetOp(ir.OAS2)
as.SetTypecheck(0)
- n = typecheck(as, ctxStmt)
+ n = typecheck.Stmt(as)
}
}
@@ -768,7 +712,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
inlMap[fn] = false
}()
if base.Debug.TypecheckInl == 0 {
- typecheckinl(fn)
+ typecheck.ImportedBody(fn)
}
// We have a function node, and it has an inlineable body.
@@ -824,21 +768,21 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
}
if v.Byval() {
- iv := typecheck(inlvar(v), ctxExpr)
+ iv := typecheck.Expr(inlvar(v))
ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, iv))
- ninit.Append(typecheck(ir.NewAssignStmt(base.Pos, iv, o), ctxStmt))
+ ninit.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, iv, o)))
inlvars[v] = iv
} else {
- addr := NewName(lookup("&" + v.Sym().Name))
+ addr := typecheck.NewName(typecheck.Lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
- ia := typecheck(inlvar(addr), ctxExpr)
+ ia := typecheck.Expr(inlvar(addr))
ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, ia))
- ninit.Append(typecheck(ir.NewAssignStmt(base.Pos, ia, nodAddr(o)), ctxStmt))
+ ninit.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, ia, typecheck.NodAddr(o))))
inlvars[addr] = ia
// When capturing by reference, all occurrence of the captured var
// must be substituted with dereference of the temporary address
- inlvars[v] = typecheck(ir.NewStarExpr(base.Pos, ia), ctxExpr)
+ inlvars[v] = typecheck.Expr(ir.NewStarExpr(base.Pos, ia))
}
}
}
@@ -857,7 +801,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// nothing should have moved to the heap yet.
base.Fatalf("impossible: %v", ln)
}
- inlf := typecheck(inlvar(ln), ctxExpr)
+ inlf := typecheck.Expr(inlvar(ln))
inlvars[ln] = inlf
if base.Flag.GenDwarfInl > 0 {
if ln.Class_ == ir.PPARAM {
@@ -889,7 +833,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
if n := ir.AsNode(t.Nname); n != nil && !ir.IsBlank(n) && !strings.HasPrefix(n.Sym().Name, "~r") {
n := n.(*ir.Name)
m = inlvar(n)
- m = typecheck(m, ctxExpr)
+ m = typecheck.Expr(m)
inlvars[n] = m
delayretvars = false // found a named result parameter
} else {
@@ -951,7 +895,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
vas = ir.NewAssignStmt(base.Pos, nil, nil)
vas.X = inlParam(param, vas, inlvars)
if len(varargs) == 0 {
- vas.Y = nodnil()
+ vas.Y = typecheck.NodNil()
vas.Y.SetType(param.Type)
} else {
lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(param.Type).(ir.Ntype), nil)
@@ -961,11 +905,11 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
}
if len(as.Rhs) != 0 {
- ninit.Append(typecheck(as, ctxStmt))
+ ninit.Append(typecheck.Stmt(as))
}
if vas != nil {
- ninit.Append(typecheck(vas, ctxStmt))
+ ninit.Append(typecheck.Stmt(vas))
}
if !delayretvars {
@@ -973,11 +917,11 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
for _, n := range retvars {
ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, n))
ras := ir.NewAssignStmt(base.Pos, n, nil)
- ninit.Append(typecheck(ras, ctxStmt))
+ ninit.Append(typecheck.Stmt(ras))
}
}
- retlabel := autolabel(".i")
+ retlabel := typecheck.AutoLabel(".i")
inlgen++
@@ -1021,7 +965,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
lab := ir.NewLabelStmt(base.Pos, retlabel)
body = append(body, lab)
- typecheckslice(body, ctxStmt)
+ typecheck.Stmts(body)
if base.Flag.GenDwarfInl > 0 {
for _, v := range inlfvars {
@@ -1061,7 +1005,7 @@ func inlvar(var_ ir.Node) ir.Node {
fmt.Printf("inlvar %+v\n", var_)
}
- n := NewName(var_.Sym())
+ n := typecheck.NewName(var_.Sym())
n.SetType(var_.Type())
n.Class_ = ir.PAUTO
n.SetUsed(true)
@@ -1074,7 +1018,7 @@ func inlvar(var_ ir.Node) ir.Node {
// Synthesize a variable to store the inlined function's results in.
func retvar(t *types.Field, i int) ir.Node {
- n := NewName(lookupN("~R", i))
+ n := typecheck.NewName(typecheck.LookupNum("~R", i))
n.SetType(t.Type)
n.Class_ = ir.PAUTO
n.SetUsed(true)
@@ -1086,7 +1030,7 @@ func retvar(t *types.Field, i int) ir.Node {
// Synthesize a variable to store the inlined function's arguments
// when they come from a multiple return call.
func argvar(t *types.Type, i int) ir.Node {
- n := NewName(lookupN("~arg", i))
+ n := typecheck.NewName(typecheck.LookupNum("~arg", i))
n.SetType(t.Elem())
n.Class_ = ir.PAUTO
n.SetUsed(true)
@@ -1198,10 +1142,10 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
}
}
- init = append(init, typecheck(as, ctxStmt))
+ init = append(init, typecheck.Stmt(as))
}
init = append(init, ir.NewBranchStmt(base.Pos, ir.OGOTO, subst.retlabel))
- typecheckslice(init, ctxStmt)
+ typecheck.Stmts(init)
return ir.NewBlockStmt(base.Pos, init)
case ir.OGOTO:
@@ -1210,7 +1154,7 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
m.SetPos(subst.updatedPos(m.Pos()))
m.PtrInit().Set(nil)
p := fmt.Sprintf("%s·%d", n.Label.Name, inlgen)
- m.Label = lookup(p)
+ m.Label = typecheck.Lookup(p)
return m
case ir.OLABEL:
@@ -1219,7 +1163,7 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
m.SetPos(subst.updatedPos(m.Pos()))
m.PtrInit().Set(nil)
p := fmt.Sprintf("%s·%d", n.Label.Name, inlgen)
- m.Label = lookup(p)
+ m.Label = typecheck.Lookup(p)
return m
}
@@ -1284,7 +1228,7 @@ func devirtualizeCall(call *ir.CallExpr) {
dt := ir.NewTypeAssertExpr(sel.Pos(), sel.X, nil)
dt.SetType(typ)
- x := typecheck(ir.NewSelectorExpr(sel.Pos(), ir.OXDOT, dt, sel.Sel), ctxExpr|ctxCallee)
+ x := typecheck.Callee(ir.NewSelectorExpr(sel.Pos(), ir.OXDOT, dt, sel.Sel))
switch x.Op() {
case ir.ODOTMETH:
x := x.(*ir.SelectorExpr)
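The devirtualizeCall hunk above rewrites an interface call whose receiver's concrete type is known into a type assertion plus a direct method call, then re-typechecks the callee with typecheck.Callee. In source terms the rewrite looks roughly like the following, shown as a hand-written illustration rather than compiler output:

package main

import "fmt"

type stringer interface{ String() string }

type impl struct{}

func (impl) String() string { return "impl.String" }

func main() {
	var s stringer = impl{}
	fmt.Println(s.String())        // original form: dynamic call through the interface
	fmt.Println(s.(impl).String()) // devirtualized form: assert to the known type, call directly
}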
diff --git a/src/cmd/compile/internal/gc/main.go b/src/cmd/compile/internal/gc/main.go
index 69ec5c8f2f..b98d1f2e10 100644
--- a/src/cmd/compile/internal/gc/main.go
+++ b/src/cmd/compile/internal/gc/main.go
@@ -13,6 +13,7 @@ import (
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/dwarf"
@@ -49,9 +50,6 @@ func hidePanic() {
}
}
-// Target is the package being compiled.
-var Target *ir.Package
-
// Main parses flags and Go source files specified in the command-line
// arguments, type-checks the parsed Go package, compiles functions to machine
// code, and finally writes the compiled package definition to disk.
@@ -197,18 +195,18 @@ func Main(archInit func(*Arch)) {
return typenamesym(t).Linksym()
}
- Target = new(ir.Package)
+ typecheck.Target = new(ir.Package)
- NeedFuncSym = makefuncsym
- NeedITab = func(t, iface *types.Type) { itabname(t, iface) }
- NeedRuntimeType = addsignat // TODO(rsc): typenamesym for lock?
+ typecheck.NeedFuncSym = makefuncsym
+ typecheck.NeedITab = func(t, iface *types.Type) { itabname(t, iface) }
+ typecheck.NeedRuntimeType = addsignat // TODO(rsc): typenamesym for lock?
base.AutogeneratedPos = makePos(src.NewFileBase("<autogenerated>", "<autogenerated>"), 1, 0)
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
return typenamesym(t).Linksym()
}
- TypecheckInit()
+ typecheck.Init()
// Parse input.
base.Timer.Start("fe", "parse")
@@ -219,7 +217,7 @@ func Main(archInit func(*Arch)) {
recordPackageName()
// Typecheck.
- TypecheckPackage()
+ typecheck.Package()
// With all user code typechecked, it's now safe to verify unused dot imports.
checkDotImports()
@@ -227,7 +225,7 @@ func Main(archInit func(*Arch)) {
// Build init task.
if initTask := fninit(); initTask != nil {
- exportsym(initTask)
+ typecheck.Export(initTask)
}
// Inlining
@@ -237,7 +235,7 @@ func Main(archInit func(*Arch)) {
}
// Devirtualize.
- for _, n := range Target.Decls {
+ for _, n := range typecheck.Target.Decls {
if n.Op() == ir.ODCLFUNC {
devirtualize(n.(*ir.Func))
}
@@ -253,7 +251,7 @@ func Main(archInit func(*Arch)) {
// Large values are also moved off stack in escape analysis;
// because large values may contain pointers, it must happen early.
base.Timer.Start("fe", "escapes")
- escapes(Target.Decls)
+ escapes(typecheck.Target.Decls)
// Collect information for go:nowritebarrierrec
// checking. This must happen before transformclosure.
@@ -267,7 +265,7 @@ func Main(archInit func(*Arch)) {
// This needs to happen before walk, because closures must be transformed
// before walk reaches a call of a closure.
base.Timer.Start("fe", "xclosures")
- for _, n := range Target.Decls {
+ for _, n := range typecheck.Target.Decls {
if n.Op() == ir.ODCLFUNC {
n := n.(*ir.Func)
if n.OClosure != nil {
@@ -292,8 +290,8 @@ func Main(archInit func(*Arch)) {
// Don't use range--walk can add functions to Target.Decls.
base.Timer.Start("be", "compilefuncs")
fcount := int64(0)
- for i := 0; i < len(Target.Decls); i++ {
- n := Target.Decls[i]
+ for i := 0; i < len(typecheck.Target.Decls); i++ {
+ n := typecheck.Target.Decls[i]
if n.Op() == ir.ODCLFUNC {
funccompile(n.(*ir.Func))
fcount++
@@ -327,7 +325,7 @@ func Main(archInit func(*Arch)) {
}
CheckLargeStacks()
- CheckFuncStack()
+ typecheck.CheckFuncStack()
if len(compilequeue) != 0 {
base.Fatalf("%d uncompiled functions", len(compilequeue))
@@ -363,7 +361,7 @@ func CheckLargeStacks() {
func cgoSymABIs() {
// The linker expects an ABI0 wrapper for all cgo-exported
// functions.
- for _, prag := range Target.CgoPragmas {
+ for _, prag := range typecheck.Target.CgoPragmas {
switch prag[0] {
case "cgo_export_static", "cgo_export_dynamic":
if symabiRefs == nil {
@@ -581,33 +579,6 @@ func findpkg(name string) (file string, ok bool) {
return "", false
}
-// loadsys loads the definitions for the low-level runtime functions,
-// so that the compiler can generate calls to them,
-// but does not make them visible to user code.
-func loadsys() {
- types.Block = 1
-
- inimport = true
- typecheckok = true
-
- typs := runtimeTypes()
- for _, d := range &runtimeDecls {
- sym := ir.Pkgs.Runtime.Lookup(d.name)
- typ := typs[d.typ]
- switch d.tag {
- case funcTag:
- importfunc(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
- case varTag:
- importvar(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
- default:
- base.Fatalf("unhandled declaration tag %v", d.tag)
- }
- }
-
- typecheckok = false
- inimport = false
-}
-
// myheight tracks the local package's height based on packages
// imported so far.
var myheight int
@@ -776,7 +747,7 @@ func importfile(f constant.Value) *types.Pkg {
base.Errorf("import %s: unexpected package format byte: %v", file, c)
base.ErrorExit()
}
- fingerprint = iimport(importpkg, imp)
+ fingerprint = typecheck.ReadImports(importpkg, imp)
default:
base.Errorf("no import in %q", path_)
diff --git a/src/cmd/compile/internal/gc/mapfile_mmap.go b/src/cmd/compile/internal/gc/mapfile_mmap.go
deleted file mode 100644
index 9483688d68..0000000000
--- a/src/cmd/compile/internal/gc/mapfile_mmap.go
+++ /dev/null
@@ -1,48 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build darwin dragonfly freebsd linux netbsd openbsd
-
-package gc
-
-import (
- "os"
- "reflect"
- "syscall"
- "unsafe"
-)
-
-// TODO(mdempsky): Is there a higher-level abstraction that still
-// works well for iimport?
-
-// mapFile returns length bytes from the file starting at the
-// specified offset as a string.
-func mapFile(f *os.File, offset, length int64) (string, error) {
- // POSIX mmap: "The implementation may require that off is a
- // multiple of the page size."
- x := offset & int64(os.Getpagesize()-1)
- offset -= x
- length += x
-
- buf, err := syscall.Mmap(int(f.Fd()), offset, int(length), syscall.PROT_READ, syscall.MAP_SHARED)
- keepAlive(f)
- if err != nil {
- return "", err
- }
-
- buf = buf[x:]
- pSlice := (*reflect.SliceHeader)(unsafe.Pointer(&buf))
-
- var res string
- pString := (*reflect.StringHeader)(unsafe.Pointer(&res))
-
- pString.Data = pSlice.Data
- pString.Len = pSlice.Len
-
- return res, nil
-}
-
-// keepAlive is a reimplementation of runtime.KeepAlive, which wasn't
-// added until Go 1.7, whereas we need to compile with Go 1.4.
-var keepAlive = func(interface{}) {}
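The deleted mapFile rounds the requested offset down to a page boundary, since POSIX allows mmap to require page-aligned offsets, and compensates by growing the length and skipping the extra leading bytes. A small sketch of just that adjustment, using a hypothetical helper rather than the deleted function:

package main

import (
	"fmt"
	"os"
)

// alignForMmap returns a page-aligned offset, the number of leading bytes to
// skip after mapping, and the correspondingly grown mapping length.
func alignForMmap(offset, length int64) (alignedOff, skip, grownLen int64) {
	page := int64(os.Getpagesize())
	skip = offset & (page - 1) // page sizes are powers of two, so masking works
	return offset - skip, skip, length + skip
}

func main() {
	off, skip, n := alignForMmap(10_000_003, 512)
	fmt.Println(off, skip, n) // with 4 KiB pages: 9998336 1667 2179
}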
diff --git a/src/cmd/compile/internal/gc/mapfile_read.go b/src/cmd/compile/internal/gc/mapfile_read.go
deleted file mode 100644
index c6f68ed5df..0000000000
--- a/src/cmd/compile/internal/gc/mapfile_read.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !darwin,!dragonfly,!freebsd,!linux,!netbsd,!openbsd
-
-package gc
-
-import (
- "io"
- "os"
-)
-
-func mapFile(f *os.File, offset, length int64) (string, error) {
- buf := make([]byte, length)
- _, err := io.ReadFull(io.NewSectionReader(f, offset, length), buf)
- if err != nil {
- return "", err
- }
- return string(buf), nil
-}
diff --git a/src/cmd/compile/internal/gc/mkbuiltin.go b/src/cmd/compile/internal/gc/mkbuiltin.go
deleted file mode 100644
index 38aa601645..0000000000
--- a/src/cmd/compile/internal/gc/mkbuiltin.go
+++ /dev/null
@@ -1,228 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-// Generate builtin.go from builtin/runtime.go.
-
-package main
-
-import (
- "bytes"
- "flag"
- "fmt"
- "go/ast"
- "go/format"
- "go/parser"
- "go/token"
- "io"
- "io/ioutil"
- "log"
- "os"
- "path/filepath"
- "strconv"
- "strings"
-)
-
-var stdout = flag.Bool("stdout", false, "write to stdout instead of builtin.go")
-
-func main() {
- flag.Parse()
-
- var b bytes.Buffer
- fmt.Fprintln(&b, "// Code generated by mkbuiltin.go. DO NOT EDIT.")
- fmt.Fprintln(&b)
- fmt.Fprintln(&b, "package gc")
- fmt.Fprintln(&b)
- fmt.Fprintln(&b, `import (`)
- fmt.Fprintln(&b, ` "cmd/compile/internal/ir"`)
- fmt.Fprintln(&b, ` "cmd/compile/internal/types"`)
- fmt.Fprintln(&b, `)`)
-
- mkbuiltin(&b, "runtime")
-
- out, err := format.Source(b.Bytes())
- if err != nil {
- log.Fatal(err)
- }
- if *stdout {
- _, err = os.Stdout.Write(out)
- } else {
- err = ioutil.WriteFile("builtin.go", out, 0666)
- }
- if err != nil {
- log.Fatal(err)
- }
-}
-
-func mkbuiltin(w io.Writer, name string) {
- fset := token.NewFileSet()
- f, err := parser.ParseFile(fset, filepath.Join("builtin", name+".go"), nil, 0)
- if err != nil {
- log.Fatal(err)
- }
-
- var interner typeInterner
-
- fmt.Fprintf(w, "var %sDecls = [...]struct { name string; tag int; typ int }{\n", name)
- for _, decl := range f.Decls {
- switch decl := decl.(type) {
- case *ast.FuncDecl:
- if decl.Recv != nil {
- log.Fatal("methods unsupported")
- }
- if decl.Body != nil {
- log.Fatal("unexpected function body")
- }
- fmt.Fprintf(w, "{%q, funcTag, %d},\n", decl.Name.Name, interner.intern(decl.Type))
- case *ast.GenDecl:
- if decl.Tok == token.IMPORT {
- if len(decl.Specs) != 1 || decl.Specs[0].(*ast.ImportSpec).Path.Value != "\"unsafe\"" {
- log.Fatal("runtime cannot import other package")
- }
- continue
- }
- if decl.Tok != token.VAR {
- log.Fatal("unhandled declaration kind", decl.Tok)
- }
- for _, spec := range decl.Specs {
- spec := spec.(*ast.ValueSpec)
- if len(spec.Values) != 0 {
- log.Fatal("unexpected values")
- }
- typ := interner.intern(spec.Type)
- for _, name := range spec.Names {
- fmt.Fprintf(w, "{%q, varTag, %d},\n", name.Name, typ)
- }
- }
- default:
- log.Fatal("unhandled decl type", decl)
- }
- }
- fmt.Fprintln(w, "}")
-
- fmt.Fprintln(w)
- fmt.Fprintf(w, "func %sTypes() []*types.Type {\n", name)
- fmt.Fprintf(w, "var typs [%d]*types.Type\n", len(interner.typs))
- for i, typ := range interner.typs {
- fmt.Fprintf(w, "typs[%d] = %s\n", i, typ)
- }
- fmt.Fprintln(w, "return typs[:]")
- fmt.Fprintln(w, "}")
-}
-
-// typeInterner maps Go type expressions to compiler code that
-// constructs the denoted type. It recognizes and reuses common
-// subtype expressions.
-type typeInterner struct {
- typs []string
- hash map[string]int
-}
-
-func (i *typeInterner) intern(t ast.Expr) int {
- x := i.mktype(t)
- v, ok := i.hash[x]
- if !ok {
- v = len(i.typs)
- if i.hash == nil {
- i.hash = make(map[string]int)
- }
- i.hash[x] = v
- i.typs = append(i.typs, x)
- }
- return v
-}
-
-func (i *typeInterner) subtype(t ast.Expr) string {
- return fmt.Sprintf("typs[%d]", i.intern(t))
-}
-
-func (i *typeInterner) mktype(t ast.Expr) string {
- switch t := t.(type) {
- case *ast.Ident:
- switch t.Name {
- case "byte":
- return "types.ByteType"
- case "rune":
- return "types.RuneType"
- }
- return fmt.Sprintf("types.Types[types.T%s]", strings.ToUpper(t.Name))
- case *ast.SelectorExpr:
- if t.X.(*ast.Ident).Name != "unsafe" || t.Sel.Name != "Pointer" {
- log.Fatalf("unhandled type: %#v", t)
- }
- return "types.Types[types.TUNSAFEPTR]"
-
- case *ast.ArrayType:
- if t.Len == nil {
- return fmt.Sprintf("types.NewSlice(%s)", i.subtype(t.Elt))
- }
- return fmt.Sprintf("types.NewArray(%s, %d)", i.subtype(t.Elt), intconst(t.Len))
- case *ast.ChanType:
- dir := "types.Cboth"
- switch t.Dir {
- case ast.SEND:
- dir = "types.Csend"
- case ast.RECV:
- dir = "types.Crecv"
- }
- return fmt.Sprintf("types.NewChan(%s, %s)", i.subtype(t.Value), dir)
- case *ast.FuncType:
- return fmt.Sprintf("functype(nil, %s, %s)", i.fields(t.Params, false), i.fields(t.Results, false))
- case *ast.InterfaceType:
- if len(t.Methods.List) != 0 {
- log.Fatal("non-empty interfaces unsupported")
- }
- return "types.Types[types.TINTER]"
- case *ast.MapType:
- return fmt.Sprintf("types.NewMap(%s, %s)", i.subtype(t.Key), i.subtype(t.Value))
- case *ast.StarExpr:
- return fmt.Sprintf("types.NewPtr(%s)", i.subtype(t.X))
- case *ast.StructType:
- return fmt.Sprintf("tostruct(%s)", i.fields(t.Fields, true))
-
- default:
- log.Fatalf("unhandled type: %#v", t)
- panic("unreachable")
- }
-}
-
-func (i *typeInterner) fields(fl *ast.FieldList, keepNames bool) string {
- if fl == nil || len(fl.List) == 0 {
- return "nil"
- }
- var res []string
- for _, f := range fl.List {
- typ := i.subtype(f.Type)
- if len(f.Names) == 0 {
- res = append(res, fmt.Sprintf("anonfield(%s)", typ))
- } else {
- for _, name := range f.Names {
- if keepNames {
- res = append(res, fmt.Sprintf("namedfield(%q, %s)", name.Name, typ))
- } else {
- res = append(res, fmt.Sprintf("anonfield(%s)", typ))
- }
- }
- }
- }
- return fmt.Sprintf("[]*ir.Field{%s}", strings.Join(res, ", "))
-}
-
-func intconst(e ast.Expr) int64 {
- switch e := e.(type) {
- case *ast.BasicLit:
- if e.Kind != token.INT {
- log.Fatalf("expected INT, got %v", e.Kind)
- }
- x, err := strconv.ParseInt(e.Value, 0, 64)
- if err != nil {
- log.Fatal(err)
- }
- return x
- default:
- log.Fatalf("unhandled expr: %#v", e)
- panic("unreachable")
- }
-}
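mkbuiltin's typeInterner, removed above, hash-conses the generated type expressions so that identical subexpressions share a single slot in the typs array. The same interning pattern in isolation, as an illustrative sketch:

package main

import "fmt"

// interner assigns a stable index to each distinct string and returns the
// existing index when the same string is interned again.
type interner struct {
	items []string
	index map[string]int
}

func (in *interner) intern(s string) int {
	if i, ok := in.index[s]; ok {
		return i
	}
	if in.index == nil {
		in.index = make(map[string]int)
	}
	i := len(in.items)
	in.index[s] = i
	in.items = append(in.items, s)
	return i
}

func main() {
	var in interner
	fmt.Println(in.intern("types.Types[types.TINT]")) // 0
	fmt.Println(in.intern("types.NewSlice(typs[0])")) // 1
	fmt.Println(in.intern("types.Types[types.TINT]")) // 0 again
}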
diff --git a/src/cmd/compile/internal/gc/noder.go b/src/cmd/compile/internal/gc/noder.go
index c83b60dcd4..3e8703f050 100644
--- a/src/cmd/compile/internal/gc/noder.go
+++ b/src/cmd/compile/internal/gc/noder.go
@@ -19,6 +19,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/objabi"
"cmd/internal/src"
@@ -160,7 +161,7 @@ type noder struct {
func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) {
oldScope := p.scope
p.scope = 0
- funchdr(fn)
+ typecheck.StartFuncBody(fn)
if block != nil {
body := p.stmts(block.List)
@@ -173,7 +174,7 @@ func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) {
fn.Endlineno = base.Pos
}
- funcbody()
+ typecheck.FinishFuncBody()
p.scope = oldScope
}
@@ -261,7 +262,7 @@ func (p *noder) node() {
p.checkUnused(pragma)
}
- Target.Decls = append(Target.Decls, p.decls(p.file.DeclList)...)
+ typecheck.Target.Decls = append(typecheck.Target.Decls, p.decls(p.file.DeclList)...)
base.Pos = src.NoXPos
clearImports()
@@ -273,7 +274,7 @@ func (p *noder) processPragmas() {
p.errorAt(l.pos, "//go:linkname only allowed in Go files that import \"unsafe\"")
continue
}
- n := ir.AsNode(lookup(l.local).Def)
+ n := ir.AsNode(typecheck.Lookup(l.local).Def)
if n == nil || n.Op() != ir.ONAME {
// TODO(mdempsky): Change to p.errorAt before Go 1.17 release.
// base.WarnfAt(p.makeXPos(l.pos), "//go:linkname must refer to declared function or variable (will be an error in Go 1.17)")
@@ -285,7 +286,7 @@ func (p *noder) processPragmas() {
}
n.Sym().Linkname = l.remote
}
- Target.CgoPragmas = append(Target.CgoPragmas, p.pragcgobuf...)
+ typecheck.Target.CgoPragmas = append(typecheck.Target.CgoPragmas, p.pragcgobuf...)
}
func (p *noder) decls(decls []syntax.Decl) (l []ir.Node) {
@@ -342,7 +343,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
}
if !ipkg.Direct {
- Target.Imports = append(Target.Imports, ipkg)
+ typecheck.Target.Imports = append(typecheck.Target.Imports, ipkg)
}
ipkg.Direct = true
@@ -350,7 +351,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
if imp.LocalPkgName != nil {
my = p.name(imp.LocalPkgName)
} else {
- my = lookup(ipkg.Name)
+ my = typecheck.Lookup(ipkg.Name)
}
pack := ir.NewPkgName(p.pos(imp), my, ipkg)
@@ -366,7 +367,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
return
}
if my.Def != nil {
- redeclare(pack.Pos(), my, "as imported package name")
+ typecheck.Redeclared(pack.Pos(), my, "as imported package name")
}
my.Def = pack
my.Lastlineno = pack.Pos()
@@ -401,7 +402,7 @@ func (p *noder) varDecl(decl *syntax.VarDecl) []ir.Node {
}
p.setlineno(decl)
- return variter(names, typ, exprs)
+ return typecheck.DeclVars(names, typ, exprs)
}
// constState tracks state between constant specifiers within a
@@ -449,7 +450,7 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []ir.Node {
if decl.Values == nil {
v = ir.DeepCopy(n.Pos(), v)
}
- declare(n, dclcontext)
+ typecheck.Declare(n, typecheck.DeclContext)
n.Ntype = typ
n.Defn = v
@@ -469,7 +470,7 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []ir.Node {
func (p *noder) typeDecl(decl *syntax.TypeDecl) ir.Node {
n := p.declName(ir.OTYPE, decl.Name)
- declare(n, dclcontext)
+ typecheck.Declare(n, typecheck.DeclContext)
// decl.Type may be nil but in that case we got a syntax error during parsing
typ := p.typeExprOrNil(decl.Type)
@@ -514,7 +515,7 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) ir.Node {
if len(t.Params) > 0 || len(t.Results) > 0 {
base.ErrorfAt(f.Pos(), "func init must have no arguments and no return values")
}
- Target.Inits = append(Target.Inits, f)
+ typecheck.Target.Inits = append(typecheck.Target.Inits, f)
}
if types.LocalPkg.Name == "main" && name.Name == "main" {
@@ -541,7 +542,7 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) ir.Node {
}
if fun.Recv == nil {
- declare(f.Nname, ir.PFUNC)
+ typecheck.Declare(f.Nname, ir.PFUNC)
}
p.funcBody(f, fun.Body)
@@ -704,7 +705,7 @@ func (p *noder) expr(expr syntax.Expr) ir.Node {
pos, op := p.pos(expr), p.unOp(expr.Op)
switch op {
case ir.OADDR:
- return nodAddrAt(pos, x)
+ return typecheck.NodAddrAt(pos, x)
case ir.ODEREF:
return ir.NewStarExpr(pos, x)
}
@@ -950,7 +951,7 @@ func (p *noder) embedded(typ syntax.Expr) *ir.Field {
}
sym := p.packname(typ)
- n := ir.NewField(p.pos(typ), lookup(sym.Name), importName(sym).(ir.Ntype), nil)
+ n := ir.NewField(p.pos(typ), typecheck.Lookup(sym.Name), importName(sym).(ir.Ntype), nil)
n.Embedded = true
if isStar {
@@ -1136,8 +1137,8 @@ func (p *noder) assignList(expr syntax.Expr, defn ir.Node, colas bool) []ir.Node
}
newOrErr = true
- n := NewName(sym)
- declare(n, dclcontext)
+ n := typecheck.NewName(sym)
+ typecheck.Declare(n, typecheck.DeclContext)
n.Defn = defn
defn.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n))
res[i] = n
@@ -1245,8 +1246,8 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitch
n.List.Set(p.exprList(clause.Cases))
}
if tswitch != nil && tswitch.Tag != nil {
- nn := NewName(tswitch.Tag.Sym())
- declare(nn, dclcontext)
+ nn := typecheck.NewName(tswitch.Tag.Sym())
+ typecheck.Declare(nn, typecheck.DeclContext)
n.Vars = []ir.Node{nn}
// keep track of the instances for reporting unused
nn.Defn = tswitch
@@ -1466,7 +1467,7 @@ var tokenForLitKind = [...]token.Token{
}
func (p *noder) name(name *syntax.Name) *types.Sym {
- return lookup(name.Value)
+ return typecheck.Lookup(name.Value)
}
func (p *noder) mkname(name *syntax.Name) ir.Node {
diff --git a/src/cmd/compile/internal/gc/obj.go b/src/cmd/compile/internal/gc/obj.go
index 372277552f..1b4ba50e6b 100644
--- a/src/cmd/compile/internal/gc/obj.go
+++ b/src/cmd/compile/internal/gc/obj.go
@@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/obj"
@@ -117,14 +118,14 @@ func dumpCompilerObj(bout *bio.Writer) {
}
func dumpdata() {
- numExterns := len(Target.Externs)
- numDecls := len(Target.Decls)
+ numExterns := len(typecheck.Target.Externs)
+ numDecls := len(typecheck.Target.Decls)
- dumpglobls(Target.Externs)
+ dumpglobls(typecheck.Target.Externs)
dumpfuncsyms()
addptabs()
- numExports := len(Target.Exports)
- addsignats(Target.Externs)
+ numExports := len(typecheck.Target.Exports)
+ addsignats(typecheck.Target.Externs)
dumpsignats()
dumptabs()
numPTabs, numITabs := CountTabs()
@@ -140,22 +141,22 @@ func dumpdata() {
// In the typical case, we loop 0 or 1 times.
// It was not until issue 24761 that we found any code that required a loop at all.
for {
- for i := numDecls; i < len(Target.Decls); i++ {
- n := Target.Decls[i]
+ for i := numDecls; i < len(typecheck.Target.Decls); i++ {
+ n := typecheck.Target.Decls[i]
if n.Op() == ir.ODCLFUNC {
funccompile(n.(*ir.Func))
}
}
- numDecls = len(Target.Decls)
+ numDecls = len(typecheck.Target.Decls)
compileFunctions()
dumpsignats()
- if numDecls == len(Target.Decls) {
+ if numDecls == len(typecheck.Target.Decls) {
break
}
}
// Dump extra globals.
- dumpglobls(Target.Externs[numExterns:])
+ dumpglobls(typecheck.Target.Externs[numExterns:])
if zerosize > 0 {
zero := ir.Pkgs.Map.Lookup("zero")
@@ -164,7 +165,7 @@ func dumpdata() {
addGCLocals()
- if numExports != len(Target.Exports) {
+ if numExports != len(typecheck.Target.Exports) {
base.Fatalf("Target.Exports changed after compile functions loop")
}
newNumPTabs, newNumITabs := CountTabs()
@@ -179,11 +180,11 @@ func dumpdata() {
func dumpLinkerObj(bout *bio.Writer) {
printObjHeader(bout)
- if len(Target.CgoPragmas) != 0 {
+ if len(typecheck.Target.CgoPragmas) != 0 {
// write empty export section; must be before cgo section
fmt.Fprintf(bout, "\n$$\n\n$$\n\n")
fmt.Fprintf(bout, "\n$$ // cgo\n")
- if err := json.NewEncoder(bout).Encode(Target.CgoPragmas); err != nil {
+ if err := json.NewEncoder(bout).Encode(typecheck.Target.CgoPragmas); err != nil {
base.Fatalf("serializing pragcgobuf: %v", err)
}
fmt.Fprintf(bout, "\n$$\n\n")
@@ -198,7 +199,7 @@ func addptabs() {
if !base.Ctxt.Flag_dynlink || types.LocalPkg.Name != "main" {
return
}
- for _, exportn := range Target.Exports {
+ for _, exportn := range typecheck.Target.Exports {
s := exportn.Sym()
nn := ir.AsNode(s.Def)
if nn == nil {
@@ -474,7 +475,7 @@ func slicedata(pos src.XPos, s string) *ir.Name {
slicedataGen++
symname := fmt.Sprintf(".gobytes.%d", slicedataGen)
sym := types.LocalPkg.Lookup(symname)
- symnode := NewName(sym)
+ symnode := typecheck.NewName(sym)
sym.Def = symnode
lsym := sym.Linksym()
diff --git a/src/cmd/compile/internal/gc/order.go b/src/cmd/compile/internal/gc/order.go
index 3d35094a58..075bcea92c 100644
--- a/src/cmd/compile/internal/gc/order.go
+++ b/src/cmd/compile/internal/gc/order.go
@@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
@@ -63,7 +64,7 @@ func order(fn *ir.Func) {
// append typechecks stmt and appends it to out.
func (o *Order) append(stmt ir.Node) {
- o.out = append(o.out, typecheck(stmt, ctxStmt))
+ o.out = append(o.out, typecheck.Stmt(stmt))
}
// newTemp allocates a new temporary with the given type,
@@ -85,7 +86,7 @@ func (o *Order) newTemp(t *types.Type, clear bool) *ir.Name {
}
}
if v == nil {
- v = temp(t)
+ v = typecheck.Temp(t)
}
if clear {
o.append(ir.NewAssignStmt(base.Pos, v, nil))
@@ -142,7 +143,7 @@ func (o *Order) cheapExpr(n ir.Node) ir.Node {
}
a := ir.SepCopy(n).(*ir.UnaryExpr)
a.X = l
- return typecheck(a, ctxExpr)
+ return typecheck.Expr(a)
}
return o.copyExpr(n)
@@ -168,7 +169,7 @@ func (o *Order) safeExpr(n ir.Node) ir.Node {
}
a := ir.SepCopy(n).(*ir.UnaryExpr)
a.X = l
- return typecheck(a, ctxExpr)
+ return typecheck.Expr(a)
case ir.ODOT:
n := n.(*ir.SelectorExpr)
@@ -178,7 +179,7 @@ func (o *Order) safeExpr(n ir.Node) ir.Node {
}
a := ir.SepCopy(n).(*ir.SelectorExpr)
a.X = l
- return typecheck(a, ctxExpr)
+ return typecheck.Expr(a)
case ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
@@ -188,7 +189,7 @@ func (o *Order) safeExpr(n ir.Node) ir.Node {
}
a := ir.SepCopy(n).(*ir.SelectorExpr)
a.X = l
- return typecheck(a, ctxExpr)
+ return typecheck.Expr(a)
case ir.ODEREF:
n := n.(*ir.StarExpr)
@@ -198,7 +199,7 @@ func (o *Order) safeExpr(n ir.Node) ir.Node {
}
a := ir.SepCopy(n).(*ir.StarExpr)
a.X = l
- return typecheck(a, ctxExpr)
+ return typecheck.Expr(a)
case ir.OINDEX, ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
@@ -215,7 +216,7 @@ func (o *Order) safeExpr(n ir.Node) ir.Node {
a := ir.SepCopy(n).(*ir.IndexExpr)
a.X = l
a.Index = r
- return typecheck(a, ctxExpr)
+ return typecheck.Expr(a)
default:
base.Fatalf("order.safeExpr %v", n.Op())
@@ -241,7 +242,7 @@ func isaddrokay(n ir.Node) bool {
func (o *Order) addrTemp(n ir.Node) ir.Node {
if n.Op() == ir.OLITERAL || n.Op() == ir.ONIL {
// TODO: expand this to all static composite literal nodes?
- n = defaultlit(n, nil)
+ n = typecheck.DefaultLit(n, nil)
types.CalcSize(n.Type())
vstat := readonlystaticname(n.Type())
var s InitSchedule
@@ -249,7 +250,7 @@ func (o *Order) addrTemp(n ir.Node) ir.Node {
if s.out != nil {
base.Fatalf("staticassign of const generated code: %+v", n)
}
- vstat = typecheck(vstat, ctxExpr).(*ir.Name)
+ vstat = typecheck.Expr(vstat).(*ir.Name)
return vstat
}
if isaddrokay(n) {
@@ -336,7 +337,7 @@ func (o *Order) cleanTempNoPop(mark ordermarker) []ir.Node {
var out []ir.Node
for i := len(o.temp) - 1; i >= int(mark); i-- {
n := o.temp[i]
- out = append(out, typecheck(ir.NewUnaryExpr(base.Pos, ir.OVARKILL, n), ctxStmt))
+ out = append(out, typecheck.Stmt(ir.NewUnaryExpr(base.Pos, ir.OVARKILL, n)))
}
return out
}
@@ -388,7 +389,7 @@ func orderMakeSliceCopy(s []ir.Node) {
mk.Cap = cp.Y
// Set bounded when m = OMAKESLICE([]T, len(s)); OCOPY(m, s)
mk.SetBounded(mk.Len.Op() == ir.OLEN && ir.SameSafeExpr(mk.Len.(*ir.UnaryExpr).X, cp.Y))
- as.Y = typecheck(mk, ctxExpr)
+ as.Y = typecheck.Expr(mk)
s[1] = nil // remove separate copy call
}
@@ -495,7 +496,7 @@ func (o *Order) call(nn ir.Node) {
}
n := nn.(*ir.CallExpr)
- fixVariadicCall(n)
+ typecheck.FixVariadicCall(n)
n.X = o.expr(n.X, nil)
o.exprList(n.Args)
@@ -513,7 +514,7 @@ func (o *Order) call(nn ir.Node) {
x := o.copyExpr(arg.X)
arg.X = x
x.Name().SetAddrtaken(true) // ensure SSA keeps the x variable
- n.Body.Append(typecheck(ir.NewUnaryExpr(base.Pos, ir.OVARLIVE, x), ctxStmt))
+ n.Body.Append(typecheck.Stmt(ir.NewUnaryExpr(base.Pos, ir.OVARLIVE, x)))
}
}
}
@@ -584,7 +585,7 @@ func (o *Order) mapAssign(n ir.Node) {
t := o.newTemp(m.Type(), false)
n.Lhs[i] = t
a := ir.NewAssignStmt(base.Pos, m, t)
- post = append(post, typecheck(a, ctxStmt))
+ post = append(post, typecheck.Stmt(a))
}
}
@@ -653,8 +654,8 @@ func (o *Order) stmt(n ir.Node) {
l2.Assigned = false
}
l2 = o.copyExpr(l2)
- r := o.expr(typecheck(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y), ctxExpr), nil)
- as := typecheck(ir.NewAssignStmt(n.Pos(), l1, r), ctxStmt)
+ r := o.expr(typecheck.Expr(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y)), nil)
+ as := typecheck.Stmt(ir.NewAssignStmt(n.Pos(), l1, r))
o.mapAssign(as)
o.cleanTemp(t)
return
@@ -858,7 +859,7 @@ func (o *Order) stmt(n ir.Node) {
if r.Type().IsString() && r.Type() != types.Types[types.TSTRING] {
r = ir.NewConvExpr(base.Pos, ir.OCONV, nil, r)
r.SetType(types.Types[types.TSTRING])
- r = typecheck(r, ctxExpr)
+ r = typecheck.Expr(r)
}
n.X = o.copyExpr(r)
@@ -949,11 +950,11 @@ func (o *Order) stmt(n ir.Node) {
if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).X == n {
init = init[1:]
}
- dcl := typecheck(ir.NewDecl(base.Pos, ir.ODCL, n), ctxStmt)
+ dcl := typecheck.Stmt(ir.NewDecl(base.Pos, ir.ODCL, n))
ncas.PtrInit().Append(dcl)
}
tmp := o.newTemp(t, t.HasPointers())
- as := typecheck(ir.NewAssignStmt(base.Pos, n, conv(tmp, n.Type())), ctxStmt)
+ as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, n, typecheck.Conv(tmp, n.Type())))
ncas.PtrInit().Append(as)
r.Lhs[i] = tmp
}
@@ -1217,7 +1218,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// Evaluate left-hand side.
lhs := o.expr(n.X, nil)
- o.out = append(o.out, typecheck(ir.NewAssignStmt(base.Pos, r, lhs), ctxStmt))
+ o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, lhs)))
// Evaluate right-hand side, save generated code.
saveout := o.out
@@ -1225,7 +1226,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
t := o.markTemp()
o.edge()
rhs := o.expr(n.Y, nil)
- o.out = append(o.out, typecheck(ir.NewAssignStmt(base.Pos, r, rhs), ctxStmt))
+ o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, rhs)))
o.cleanTemp(t)
gen := o.out
o.out = saveout
@@ -1307,7 +1308,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
case ir.OCLOSURE:
n := n.(*ir.ClosureExpr)
if n.Transient() && len(n.Func.ClosureVars) > 0 {
- n.Prealloc = o.newTemp(closureType(n), false)
+ n.Prealloc = o.newTemp(typecheck.ClosureType(n), false)
}
return n
@@ -1315,7 +1316,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
n := n.(*ir.CallPartExpr)
n.X = o.expr(n.X, nil)
if n.Transient() {
- t := partialCallType(n)
+ t := typecheck.PartialCallType(n)
n.Prealloc = o.newTemp(t, false)
}
return n
@@ -1415,13 +1416,13 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// Emit the creation of the map (with all its static entries).
m := o.newTemp(n.Type(), false)
as := ir.NewAssignStmt(base.Pos, m, n)
- typecheck(as, ctxStmt)
+ typecheck.Stmt(as)
o.stmt(as)
// Emit eval+insert of dynamic entries, one at a time.
for _, r := range dynamics {
as := ir.NewAssignStmt(base.Pos, ir.NewIndexExpr(base.Pos, m, r.Key), r.Value)
- typecheck(as, ctxStmt) // Note: this converts the OINDEX to an OINDEXMAP
+ typecheck.Stmt(as) // Note: this converts the OINDEX to an OINDEXMAP
o.stmt(as)
}
return m
@@ -1455,7 +1456,7 @@ func (o *Order) as2(n *ir.AssignListStmt) {
as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
as.Lhs.Set(left)
as.Rhs.Set(tmplist)
- o.stmt(typecheck(as, ctxStmt))
+ o.stmt(typecheck.Stmt(as))
}
// okAs2 orders OAS2XXX with ok.
@@ -1475,12 +1476,12 @@ func (o *Order) okAs2(n *ir.AssignListStmt) {
if tmp1 != nil {
r := ir.NewAssignStmt(base.Pos, n.Lhs[0], tmp1)
- o.mapAssign(typecheck(r, ctxStmt))
+ o.mapAssign(typecheck.Stmt(r))
n.Lhs[0] = tmp1
}
if tmp2 != nil {
- r := ir.NewAssignStmt(base.Pos, n.Lhs[1], conv(tmp2, n.Lhs[1].Type()))
- o.mapAssign(typecheck(r, ctxStmt))
+ r := ir.NewAssignStmt(base.Pos, n.Lhs[1], typecheck.Conv(tmp2, n.Lhs[1].Type()))
+ o.mapAssign(typecheck.Stmt(r))
n.Lhs[1] = tmp2
}
}
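
Every order.go hunk above is the same mechanical renaming into the exported typecheck API. Collected in one place, with a throwaway helper for flavor (zeroTemp is not part of the CL; it simply mirrors the clear path of newTemp shown above):

// Renamings applied throughout order.go (old gc helper -> exported typecheck API):
//   typecheck(n, ctxExpr)   -> typecheck.Expr(n)
//   typecheck(n, ctxStmt)   -> typecheck.Stmt(n)
//   temp(t)                 -> typecheck.Temp(t)
//   defaultlit(n, nil)      -> typecheck.DefaultLit(n, nil)
//   conv(n, t)              -> typecheck.Conv(n, t)
//   fixVariadicCall(n)      -> typecheck.FixVariadicCall(n)
//   closureType(n)          -> typecheck.ClosureType(n)
//   partialCallType(n)      -> typecheck.PartialCallType(n)
func zeroTemp(o *Order, t *types.Type) *ir.Name { // illustrative only
	v := typecheck.Temp(t) // was: temp(t)
	o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, v, nil))) // was: typecheck(..., ctxStmt)
	return v
}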
diff --git a/src/cmd/compile/internal/gc/pgen.go b/src/cmd/compile/internal/gc/pgen.go
index 337556ea41..c0f3326454 100644
--- a/src/cmd/compile/internal/gc/pgen.go
+++ b/src/cmd/compile/internal/gc/pgen.go
@@ -8,6 +8,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/dwarf"
"cmd/internal/obj"
@@ -146,7 +147,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
}
if f.Config.NeedsFpScratch && scratchUsed {
- s.scratchFpMem = tempAt(src.NoXPos, s.curfn, types.Types[types.TUINT64])
+ s.scratchFpMem = typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT64])
}
sort.Sort(byStackVar(fn.Dcl))
@@ -214,11 +215,11 @@ func funccompile(fn *ir.Func) {
return
}
- dclcontext = ir.PAUTO
+ typecheck.DeclContext = ir.PAUTO
ir.CurFunc = fn
compile(fn)
ir.CurFunc = nil
- dclcontext = ir.PEXTERN
+ typecheck.DeclContext = ir.PEXTERN
}
func compile(fn *ir.Func) {
diff --git a/src/cmd/compile/internal/gc/pgen_test.go b/src/cmd/compile/internal/gc/pgen_test.go
index 1170db2681..95c4b24fa1 100644
--- a/src/cmd/compile/internal/gc/pgen_test.go
+++ b/src/cmd/compile/internal/gc/pgen_test.go
@@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"reflect"
@@ -41,7 +42,7 @@ func TestCmpstackvar(t *testing.T) {
if s == nil {
s = &types.Sym{Name: "."}
}
- n := NewName(s)
+ n := typecheck.NewName(s)
n.SetType(t)
n.SetFrameOffset(xoffset)
n.Class_ = cl
@@ -156,7 +157,7 @@ func TestCmpstackvar(t *testing.T) {
func TestStackvarSort(t *testing.T) {
nod := func(xoffset int64, t *types.Type, s *types.Sym, cl ir.Class) *ir.Name {
- n := NewName(s)
+ n := typecheck.NewName(s)
n.SetType(t)
n.SetFrameOffset(xoffset)
n.Class_ = cl
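
In pgen.go and its test, the compile-time state that used to be package-level in gc (dclcontext, tempAt, NewName) is now reached through package typecheck. A minimal sketch of the pattern, assuming a caller inside package gc; compileOne is invented for illustration:

func compileOne(fn *ir.Func) {
	typecheck.DeclContext = ir.PAUTO // declarations now go into the function (was: dclcontext = ir.PAUTO)
	ir.CurFunc = fn
	// Scratch slot allocated directly on fn, as AllocFrame does above (was: tempAt(...)).
	scratch := typecheck.TempAt(src.NoXPos, fn, types.Types[types.TUINT64])
	_ = scratch
	ir.CurFunc = nil
	typecheck.DeclContext = ir.PEXTERN // restore package scope
}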
diff --git a/src/cmd/compile/internal/gc/range.go b/src/cmd/compile/internal/gc/range.go
index a9447189c2..c040811932 100644
--- a/src/cmd/compile/internal/gc/range.go
+++ b/src/cmd/compile/internal/gc/range.go
@@ -7,136 +7,12 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/sys"
"unicode/utf8"
)
-// range
-func typecheckrange(n *ir.RangeStmt) {
- // Typechecking order is important here:
- // 0. first typecheck range expression (slice/map/chan),
- // it is evaluated only once and so logically it is not part of the loop.
- // 1. typecheck produced values,
- // this part can declare new vars and so it must be typechecked before body,
- // because body can contain a closure that captures the vars.
- // 2. decldepth++ to denote loop body.
- // 3. typecheck body.
- // 4. decldepth--.
- typecheckrangeExpr(n)
-
- // second half of dance, the first half being typecheckrangeExpr
- n.SetTypecheck(1)
- ls := n.Vars
- for i1, n1 := range ls {
- if n1.Typecheck() == 0 {
- ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
- }
- }
-
- decldepth++
- typecheckslice(n.Body, ctxStmt)
- decldepth--
-}
-
-func typecheckrangeExpr(n *ir.RangeStmt) {
- n.X = typecheck(n.X, ctxExpr)
-
- t := n.X.Type()
- if t == nil {
- return
- }
- // delicate little dance. see typecheckas2
- ls := n.Vars
- for i1, n1 := range ls {
- if !ir.DeclaredBy(n1, n) {
- ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
- }
- }
-
- if t.IsPtr() && t.Elem().IsArray() {
- t = t.Elem()
- }
- n.SetType(t)
-
- var t1, t2 *types.Type
- toomany := false
- switch t.Kind() {
- default:
- base.ErrorfAt(n.Pos(), "cannot range over %L", n.X)
- return
-
- case types.TARRAY, types.TSLICE:
- t1 = types.Types[types.TINT]
- t2 = t.Elem()
-
- case types.TMAP:
- t1 = t.Key()
- t2 = t.Elem()
-
- case types.TCHAN:
- if !t.ChanDir().CanRecv() {
- base.ErrorfAt(n.Pos(), "invalid operation: range %v (receive from send-only type %v)", n.X, n.X.Type())
- return
- }
-
- t1 = t.Elem()
- t2 = nil
- if len(n.Vars) == 2 {
- toomany = true
- }
-
- case types.TSTRING:
- t1 = types.Types[types.TINT]
- t2 = types.RuneType
- }
-
- if len(n.Vars) > 2 || toomany {
- base.ErrorfAt(n.Pos(), "too many variables in range")
- }
-
- var v1, v2 ir.Node
- if len(n.Vars) != 0 {
- v1 = n.Vars[0]
- }
- if len(n.Vars) > 1 {
- v2 = n.Vars[1]
- }
-
- // this is not only an optimization but also a requirement in the spec.
- // "if the second iteration variable is the blank identifier, the range
- // clause is equivalent to the same clause with only the first variable
- // present."
- if ir.IsBlank(v2) {
- if v1 != nil {
- n.Vars = []ir.Node{v1}
- }
- v2 = nil
- }
-
- if v1 != nil {
- if ir.DeclaredBy(v1, n) {
- v1.SetType(t1)
- } else if v1.Type() != nil {
- if op, why := assignop(t1, v1.Type()); op == ir.OXXX {
- base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t1, v1, why)
- }
- }
- checkassign(n, v1)
- }
-
- if v2 != nil {
- if ir.DeclaredBy(v2, n) {
- v2.SetType(t2)
- } else if v2.Type() != nil {
- if op, why := assignop(t2, v2.Type()); op == ir.OXXX {
- base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t2, v2, why)
- }
- }
- checkassign(n, v2)
- }
-}
-
func cheapComputableIndex(width int64) bool {
switch thearch.LinkArch.Family {
// MIPS does not have R+R addressing
@@ -221,8 +97,8 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// order.stmt arranged for a copy of the array/slice variable if needed.
ha := a
- hv1 := temp(types.Types[types.TINT])
- hn := temp(types.Types[types.TINT])
+ hv1 := typecheck.Temp(types.Types[types.TINT])
+ hn := typecheck.Temp(types.Types[types.TINT])
init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
init = append(init, ir.NewAssignStmt(base.Pos, hn, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha)))
@@ -271,10 +147,10 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
ifGuard.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn)
nfor.SetOp(ir.OFORUNTIL)
- hp := temp(types.NewPtr(nrange.Type().Elem()))
+ hp := typecheck.Temp(types.NewPtr(nrange.Type().Elem()))
tmp := ir.NewIndexExpr(base.Pos, ha, ir.NewInt(0))
tmp.SetBounded(true)
- init = append(init, ir.NewAssignStmt(base.Pos, hp, nodAddr(tmp)))
+ init = append(init, ir.NewAssignStmt(base.Pos, hp, typecheck.NodAddr(tmp)))
// Use OAS2 to correctly handle assignments
// of the form "v1, a[v1] := range".
@@ -289,7 +165,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// advancing the pointer is safe and won't go past the
// end of the allocation.
as := ir.NewAssignStmt(base.Pos, hp, addptr(hp, t.Elem().Width))
- nfor.Late = []ir.Node{typecheck(as, ctxStmt)}
+ nfor.Late = []ir.Node{typecheck.Stmt(as)}
case types.TMAP:
// order.stmt allocated the iterator for us.
@@ -301,15 +177,15 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
keysym := th.Field(0).Sym // depends on layout of iterator struct. See reflect.go:hiter
elemsym := th.Field(1).Sym // ditto
- fn := syslook("mapiterinit")
+ fn := typecheck.LookupRuntime("mapiterinit")
- fn = substArgTypes(fn, t.Key(), t.Elem(), th)
- init = append(init, mkcall1(fn, nil, nil, typename(t), ha, nodAddr(hit)))
- nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym), nodnil())
+ fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem(), th)
+ init = append(init, mkcall1(fn, nil, nil, typename(t), ha, typecheck.NodAddr(hit)))
+ nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym), typecheck.NodNil())
- fn = syslook("mapiternext")
- fn = substArgTypes(fn, th)
- nfor.Post = mkcall1(fn, nil, nil, nodAddr(hit))
+ fn = typecheck.LookupRuntime("mapiternext")
+ fn = typecheck.SubstArgTypes(fn, th)
+ nfor.Post = mkcall1(fn, nil, nil, typecheck.NodAddr(hit))
key := ir.NewStarExpr(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym))
if v1 == nil {
@@ -328,12 +204,12 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// order.stmt arranged for a copy of the channel variable.
ha := a
- hv1 := temp(t.Elem())
+ hv1 := typecheck.Temp(t.Elem())
hv1.SetTypecheck(1)
if t.Elem().HasPointers() {
init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
}
- hb := temp(types.Types[types.TBOOL])
+ hb := typecheck.Temp(types.Types[types.TBOOL])
nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, hb, ir.NewBool(false))
a := ir.NewAssignListStmt(base.Pos, ir.OAS2RECV, nil, nil)
@@ -370,9 +246,9 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// order.stmt arranged for a copy of the string variable.
ha := a
- hv1 := temp(types.Types[types.TINT])
- hv1t := temp(types.Types[types.TINT])
- hv2 := temp(types.RuneType)
+ hv1 := typecheck.Temp(types.Types[types.TINT])
+ hv1t := typecheck.Temp(types.Types[types.TINT])
+ hv2 := typecheck.Temp(types.RuneType)
// hv1 := 0
init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
@@ -388,7 +264,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// hv2 := rune(ha[hv1])
nind := ir.NewIndexExpr(base.Pos, ha, hv1)
nind.SetBounded(true)
- body = append(body, ir.NewAssignStmt(base.Pos, hv2, conv(nind, types.RuneType)))
+ body = append(body, ir.NewAssignStmt(base.Pos, hv2, typecheck.Conv(nind, types.RuneType)))
// if hv2 < utf8.RuneSelf
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
@@ -403,7 +279,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// hv2, hv1 = decoderune(ha, hv1)
eif.Lhs = []ir.Node{hv2, hv1}
- fn := syslook("decoderune")
+ fn := typecheck.LookupRuntime("decoderune")
eif.Rhs = []ir.Node{mkcall1(fn, fn.Type().Results(), nil, ha, hv1)}
body = append(body, nif)
@@ -422,21 +298,21 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
}
}
- typecheckslice(init, ctxStmt)
+ typecheck.Stmts(init)
if ifGuard != nil {
ifGuard.PtrInit().Append(init...)
- ifGuard = typecheck(ifGuard, ctxStmt).(*ir.IfStmt)
+ ifGuard = typecheck.Stmt(ifGuard).(*ir.IfStmt)
} else {
nfor.PtrInit().Append(init...)
}
- typecheckslice(nfor.Cond.Init(), ctxStmt)
+ typecheck.Stmts(nfor.Cond.Init())
- nfor.Cond = typecheck(nfor.Cond, ctxExpr)
- nfor.Cond = defaultlit(nfor.Cond, nil)
- nfor.Post = typecheck(nfor.Post, ctxStmt)
- typecheckslice(body, ctxStmt)
+ nfor.Cond = typecheck.Expr(nfor.Cond)
+ nfor.Cond = typecheck.DefaultLit(nfor.Cond, nil)
+ nfor.Post = typecheck.Stmt(nfor.Post)
+ typecheck.Stmts(body)
nfor.Body.Append(body...)
nfor.Body.Append(nrange.Body...)
@@ -505,10 +381,10 @@ func mapClear(m ir.Node) ir.Node {
t := m.Type()
// instantiate mapclear(typ *type, hmap map[any]any)
- fn := syslook("mapclear")
- fn = substArgTypes(fn, t.Key(), t.Elem())
+ fn := typecheck.LookupRuntime("mapclear")
+ fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem())
n := mkcall1(fn, nil, nil, typename(t), m)
- return walkstmt(typecheck(n, ctxStmt))
+ return walkstmt(typecheck.Stmt(n))
}
// Lower n into runtime·memclr if possible, for
@@ -566,16 +442,16 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
n.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(0))
// hp = &a[0]
- hp := temp(types.Types[types.TUNSAFEPTR])
+ hp := typecheck.Temp(types.Types[types.TUNSAFEPTR])
ix := ir.NewIndexExpr(base.Pos, a, ir.NewInt(0))
ix.SetBounded(true)
- addr := convnop(nodAddr(ix), types.Types[types.TUNSAFEPTR])
+ addr := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])
n.Body.Append(ir.NewAssignStmt(base.Pos, hp, addr))
// hn = len(a) * sizeof(elem(a))
- hn := temp(types.Types[types.TUINTPTR])
- mul := conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(elemsize)), types.Types[types.TUINTPTR])
+ hn := typecheck.Temp(types.Types[types.TUINTPTR])
+ mul := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(elemsize)), types.Types[types.TUINTPTR])
n.Body.Append(ir.NewAssignStmt(base.Pos, hn, mul))
var fn ir.Node
@@ -595,9 +471,9 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
n.Body.Append(v1)
- n.Cond = typecheck(n.Cond, ctxExpr)
- n.Cond = defaultlit(n.Cond, nil)
- typecheckslice(n.Body, ctxStmt)
+ n.Cond = typecheck.Expr(n.Cond)
+ n.Cond = typecheck.DefaultLit(n.Cond, nil)
+ typecheck.Stmts(n.Body)
return walkstmt(n)
}
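
The map case of walkrange shows the general recipe for calling a polymorphic runtime helper after the split: look the stub up, substitute concrete types for its "any" parameters, then build the call. A sketch of just that recipe, with a made-up wrapper name (th is the runtime hiter struct type, exactly as in the hunk above):

func callMapIterInit(t, th *types.Type, ha, hit ir.Node) ir.Node { // illustrative only
	fn := typecheck.LookupRuntime("mapiterinit")            // was: syslook("mapiterinit")
	fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem(), th) // was: substArgTypes(fn, ...)
	return mkcall1(fn, nil, nil, typename(t), ha, typecheck.NodAddr(hit))
}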
diff --git a/src/cmd/compile/internal/gc/reflect.go b/src/cmd/compile/internal/gc/reflect.go
index 987b2d6ee2..7594884f9f 100644
--- a/src/cmd/compile/internal/gc/reflect.go
+++ b/src/cmd/compile/internal/gc/reflect.go
@@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/gcprog"
"cmd/internal/obj"
@@ -339,36 +340,6 @@ func deferstruct(stksize int64) *types.Type {
return s
}
-// f is method type, with receiver.
-// return function type, receiver as first argument (or not).
-func methodfunc(f *types.Type, receiver *types.Type) *types.Type {
- inLen := f.Params().Fields().Len()
- if receiver != nil {
- inLen++
- }
- in := make([]*ir.Field, 0, inLen)
-
- if receiver != nil {
- d := ir.NewField(base.Pos, nil, nil, receiver)
- in = append(in, d)
- }
-
- for _, t := range f.Params().Fields().Slice() {
- d := ir.NewField(base.Pos, nil, nil, t.Type)
- d.IsDDD = t.IsDDD()
- in = append(in, d)
- }
-
- outLen := f.Results().Fields().Len()
- out := make([]*ir.Field, 0, outLen)
- for _, t := range f.Results().Fields().Slice() {
- d := ir.NewField(base.Pos, nil, nil, t.Type)
- out = append(out, d)
- }
-
- return functype(nil, in, out)
-}
-
// methods returns the methods of the non-interface type t, sorted by name.
// Generates stub functions as needed.
func methods(t *types.Type) []*Sig {
@@ -378,7 +349,7 @@ func methods(t *types.Type) []*Sig {
if mt == nil {
return nil
}
- expandmeth(mt)
+ typecheck.CalcMethods(mt)
// type stored in interface word
it := t
@@ -418,8 +389,8 @@ func methods(t *types.Type) []*Sig {
name: method,
isym: ir.MethodSym(it, method),
tsym: ir.MethodSym(t, method),
- type_: methodfunc(f.Type, t),
- mtype: methodfunc(f.Type, nil),
+ type_: typecheck.NewMethodType(f.Type, t),
+ mtype: typecheck.NewMethodType(f.Type, nil),
}
ms = append(ms, sig)
@@ -463,7 +434,7 @@ func imethods(t *types.Type) []*Sig {
sig := &Sig{
name: f.Sym,
mtype: f.Type,
- type_: methodfunc(f.Type, nil),
+ type_: typecheck.NewMethodType(f.Type, nil),
}
methods = append(methods, sig)
@@ -916,7 +887,7 @@ func typename(t *types.Type) *ir.AddrExpr {
s.Def = n
}
- n := nodAddr(ir.AsNode(s.Def))
+ n := typecheck.NodAddr(ir.AsNode(s.Def))
n.SetType(types.NewPtr(s.Def.Type()))
n.SetTypecheck(1)
return n
@@ -928,7 +899,7 @@ func itabname(t, itype *types.Type) *ir.AddrExpr {
}
s := ir.Pkgs.Itab.Lookup(t.ShortString() + "," + itype.ShortString())
if s.Def == nil {
- n := NewName(s)
+ n := typecheck.NewName(s)
n.SetType(types.Types[types.TUINT8])
n.Class_ = ir.PEXTERN
n.SetTypecheck(1)
@@ -936,7 +907,7 @@ func itabname(t, itype *types.Type) *ir.AddrExpr {
itabs = append(itabs, itabEntry{t: t, itype: itype, lsym: s.Linksym()})
}
- n := nodAddr(ir.AsNode(s.Def))
+ n := typecheck.NodAddr(ir.AsNode(s.Def))
n.SetType(types.NewPtr(s.Def.Type()))
n.SetTypecheck(1)
return n
@@ -1033,7 +1004,7 @@ func dtypesym(t *types.Type) *obj.LSym {
if base.Ctxt.Pkgpath != "runtime" || (tbase != types.Types[tbase.Kind()] && tbase != types.ByteType && tbase != types.RuneType && tbase != types.ErrorType) { // int, float, etc
// named types from other files are defined only by those files
if tbase.Sym() != nil && tbase.Sym().Pkg != types.LocalPkg {
- if i := BaseTypeIndex(t); i >= 0 {
+ if i := typecheck.BaseTypeIndex(t); i >= 0 {
lsym.Pkg = tbase.Sym().Pkg.Prefix
lsym.SymIdx = int32(i)
lsym.Set(obj.AttrIndexed, true)
@@ -1492,7 +1463,7 @@ func dumpbasictypes() {
// The latter is the type of an auto-generated wrapper.
dtypesym(types.NewPtr(types.ErrorType))
- dtypesym(functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, types.ErrorType)}, []*ir.Field{ir.NewField(base.Pos, nil, nil, types.Types[types.TSTRING])}))
+ dtypesym(typecheck.NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, types.ErrorType)}, []*ir.Field{ir.NewField(base.Pos, nil, nil, types.Types[types.TSTRING])}))
// add paths for runtime and main, which 6l imports implicitly.
dimportpath(ir.Pkgs.Runtime)
@@ -1744,13 +1715,13 @@ func zeroaddr(size int64) ir.Node {
}
s := ir.Pkgs.Map.Lookup("zero")
if s.Def == nil {
- x := NewName(s)
+ x := typecheck.NewName(s)
x.SetType(types.Types[types.TUINT8])
x.Class_ = ir.PEXTERN
x.SetTypecheck(1)
s.Def = x
}
- z := nodAddr(ir.AsNode(s.Def))
+ z := typecheck.NodAddr(ir.AsNode(s.Def))
z.SetType(types.NewPtr(types.Types[types.TUINT8]))
z.SetTypecheck(1)
return z
diff --git a/src/cmd/compile/internal/gc/select.go b/src/cmd/compile/internal/gc/select.go
index 67a2cfd312..51bb1e5355 100644
--- a/src/cmd/compile/internal/gc/select.go
+++ b/src/cmd/compile/internal/gc/select.go
@@ -7,92 +7,10 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
)
-// select
-func typecheckselect(sel *ir.SelectStmt) {
- var def ir.Node
- lno := ir.SetPos(sel)
- typecheckslice(sel.Init(), ctxStmt)
- for _, ncase := range sel.Cases {
- ncase := ncase.(*ir.CaseStmt)
-
- if len(ncase.List) == 0 {
- // default
- if def != nil {
- base.ErrorfAt(ncase.Pos(), "multiple defaults in select (first at %v)", ir.Line(def))
- } else {
- def = ncase
- }
- } else if len(ncase.List) > 1 {
- base.ErrorfAt(ncase.Pos(), "select cases cannot be lists")
- } else {
- ncase.List[0] = typecheck(ncase.List[0], ctxStmt)
- n := ncase.List[0]
- ncase.Comm = n
- ncase.List.Set(nil)
- oselrecv2 := func(dst, recv ir.Node, colas bool) {
- n := ir.NewAssignListStmt(n.Pos(), ir.OSELRECV2, nil, nil)
- n.Lhs = []ir.Node{dst, ir.BlankNode}
- n.Rhs = []ir.Node{recv}
- n.Def = colas
- n.SetTypecheck(1)
- ncase.Comm = n
- }
- switch n.Op() {
- default:
- pos := n.Pos()
- if n.Op() == ir.ONAME {
- // We don't have the right position for ONAME nodes (see #15459 and
- // others). Using ncase.Pos for now as it will provide the correct
- // line number (assuming the expression follows the "case" keyword
- // on the same line). This matches the approach before 1.10.
- pos = ncase.Pos()
- }
- base.ErrorfAt(pos, "select case must be receive, send or assign recv")
-
- case ir.OAS:
- // convert x = <-c into x, _ = <-c
- // remove implicit conversions; the eventual assignment
- // will reintroduce them.
- n := n.(*ir.AssignStmt)
- if r := n.Y; r.Op() == ir.OCONVNOP || r.Op() == ir.OCONVIFACE {
- r := r.(*ir.ConvExpr)
- if r.Implicit() {
- n.Y = r.X
- }
- }
- if n.Y.Op() != ir.ORECV {
- base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
- break
- }
- oselrecv2(n.X, n.Y, n.Def)
-
- case ir.OAS2RECV:
- n := n.(*ir.AssignListStmt)
- if n.Rhs[0].Op() != ir.ORECV {
- base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
- break
- }
- n.SetOp(ir.OSELRECV2)
-
- case ir.ORECV:
- // convert <-c into _, _ = <-c
- n := n.(*ir.UnaryExpr)
- oselrecv2(ir.BlankNode, n, false)
-
- case ir.OSEND:
- break
- }
- }
-
- typecheckslice(ncase.Body, ctxStmt)
- }
-
- base.Pos = lno
-}
-
func walkselect(sel *ir.SelectStmt) {
lno := ir.SetPos(sel)
if len(sel.Compiled) != 0 {
@@ -167,14 +85,14 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
switch n.Op() {
case ir.OSEND:
n := n.(*ir.SendStmt)
- n.Value = nodAddr(n.Value)
- n.Value = typecheck(n.Value, ctxExpr)
+ n.Value = typecheck.NodAddr(n.Value)
+ n.Value = typecheck.Expr(n.Value)
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
if !ir.IsBlank(n.Lhs[0]) {
- n.Lhs[0] = nodAddr(n.Lhs[0])
- n.Lhs[0] = typecheck(n.Lhs[0], ctxExpr)
+ n.Lhs[0] = typecheck.NodAddr(n.Lhs[0])
+ n.Lhs[0] = typecheck.Expr(n.Lhs[0])
}
}
}
@@ -207,7 +125,7 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
ch := recv.X
elem := n.Lhs[0]
if ir.IsBlank(elem) {
- elem = nodnil()
+ elem = typecheck.NodNil()
}
if ir.IsBlank(n.Lhs[1]) {
// if selectnbrecv(&v, c) { body } else { default body }
@@ -215,12 +133,12 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
} else {
// TODO(cuonglm): make this use selectnbrecv()
// if selectnbrecv2(&v, &received, c) { body } else { default body }
- receivedp := typecheck(nodAddr(n.Lhs[1]), ctxExpr)
+ receivedp := typecheck.Expr(typecheck.NodAddr(n.Lhs[1]))
call = mkcall1(chanfn("selectnbrecv2", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), elem, receivedp, ch)
}
}
- r.Cond = typecheck(call, ctxExpr)
+ r.Cond = typecheck.Expr(call)
r.Body.Set(cas.Body)
r.Else.Set(append(dflt.Init(), dflt.Body...))
return []ir.Node{r, ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)}
@@ -236,18 +154,18 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
// generate sel-struct
base.Pos = sellineno
- selv := temp(types.NewArray(scasetype(), int64(ncas)))
- init = append(init, typecheck(ir.NewAssignStmt(base.Pos, selv, nil), ctxStmt))
+ selv := typecheck.Temp(types.NewArray(scasetype(), int64(ncas)))
+ init = append(init, typecheck.Stmt(ir.NewAssignStmt(base.Pos, selv, nil)))
// No initialization for order; runtime.selectgo is responsible for that.
- order := temp(types.NewArray(types.Types[types.TUINT16], 2*int64(ncas)))
+ order := typecheck.Temp(types.NewArray(types.Types[types.TUINT16], 2*int64(ncas)))
var pc0, pcs ir.Node
if base.Flag.Race {
- pcs = temp(types.NewArray(types.Types[types.TUINTPTR], int64(ncas)))
- pc0 = typecheck(nodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(0))), ctxExpr)
+ pcs = typecheck.Temp(types.NewArray(types.Types[types.TUINTPTR], int64(ncas)))
+ pc0 = typecheck.Expr(typecheck.NodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(0))))
} else {
- pc0 = nodnil()
+ pc0 = typecheck.NodNil()
}
// register cases
@@ -286,21 +204,21 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
casorder[i] = cas
setField := func(f string, val ir.Node) {
- r := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, ir.NewIndexExpr(base.Pos, selv, ir.NewInt(int64(i))), lookup(f)), val)
- init = append(init, typecheck(r, ctxStmt))
+ r := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, ir.NewIndexExpr(base.Pos, selv, ir.NewInt(int64(i))), typecheck.Lookup(f)), val)
+ init = append(init, typecheck.Stmt(r))
}
- c = convnop(c, types.Types[types.TUNSAFEPTR])
+ c = typecheck.ConvNop(c, types.Types[types.TUNSAFEPTR])
setField("c", c)
if !ir.IsBlank(elem) {
- elem = convnop(elem, types.Types[types.TUNSAFEPTR])
+ elem = typecheck.ConvNop(elem, types.Types[types.TUNSAFEPTR])
setField("elem", elem)
}
// TODO(mdempsky): There should be a cleaner way to
// handle this.
if base.Flag.Race {
- r := mkcall("selectsetpc", nil, nil, nodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(int64(i)))))
+ r := mkcall("selectsetpc", nil, nil, typecheck.NodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(int64(i)))))
init = append(init, r)
}
}
@@ -310,13 +228,13 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
// run the select
base.Pos = sellineno
- chosen := temp(types.Types[types.TINT])
- recvOK := temp(types.Types[types.TBOOL])
+ chosen := typecheck.Temp(types.Types[types.TINT])
+ recvOK := typecheck.Temp(types.Types[types.TBOOL])
r := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
r.Lhs = []ir.Node{chosen, recvOK}
- fn := syslook("selectgo")
+ fn := typecheck.LookupRuntime("selectgo")
r.Rhs = []ir.Node{mkcall1(fn, fn.Type().Results(), nil, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, ir.NewInt(int64(nsends)), ir.NewInt(int64(nrecvs)), ir.NewBool(dflt == nil))}
- init = append(init, typecheck(r, ctxStmt))
+ init = append(init, typecheck.Stmt(r))
// selv and order are no longer alive after selectgo.
init = append(init, ir.NewUnaryExpr(base.Pos, ir.OVARKILL, selv))
@@ -327,8 +245,8 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
// dispatch cases
dispatch := func(cond ir.Node, cas *ir.CaseStmt) {
- cond = typecheck(cond, ctxExpr)
- cond = defaultlit(cond, nil)
+ cond = typecheck.Expr(cond)
+ cond = typecheck.DefaultLit(cond, nil)
r := ir.NewIfStmt(base.Pos, cond, nil, nil)
@@ -336,7 +254,7 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
n := n.(*ir.AssignListStmt)
if !ir.IsBlank(n.Lhs[1]) {
x := ir.NewAssignStmt(base.Pos, n.Lhs[1], recvOK)
- r.Body.Append(typecheck(x, ctxStmt))
+ r.Body.Append(typecheck.Stmt(x))
}
}
@@ -359,9 +277,9 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
// bytePtrToIndex returns a Node representing "(*byte)(&n[i])".
func bytePtrToIndex(n ir.Node, i int64) ir.Node {
- s := nodAddr(ir.NewIndexExpr(base.Pos, n, ir.NewInt(i)))
+ s := typecheck.NodAddr(ir.NewIndexExpr(base.Pos, n, ir.NewInt(i)))
t := types.NewPtr(types.Types[types.TUINT8])
- return convnop(s, t)
+ return typecheck.ConvNop(s, t)
}
var scase *types.Type
@@ -369,9 +287,9 @@ var scase *types.Type
// Keep in sync with src/runtime/select.go.
func scasetype() *types.Type {
if scase == nil {
- scase = tostruct([]*ir.Field{
- ir.NewField(base.Pos, lookup("c"), nil, types.Types[types.TUNSAFEPTR]),
- ir.NewField(base.Pos, lookup("elem"), nil, types.Types[types.TUNSAFEPTR]),
+ scase = typecheck.NewStructType([]*ir.Field{
+ ir.NewField(base.Pos, typecheck.Lookup("c"), nil, types.Types[types.TUNSAFEPTR]),
+ ir.NewField(base.Pos, typecheck.Lookup("elem"), nil, types.Types[types.TUNSAFEPTR]),
})
scase.SetNoalg(true)
}
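
For the full select path, walkselectcases writes each compiled case into one slot of the selv array by assigning its "c" and "elem" fields, converted to unsafe pointers. A condensed sketch of that step, with fillCase invented for illustration (the field names come from scasetype above):

func fillCase(init []ir.Node, selv ir.Node, i int64, ch, elem ir.Node) []ir.Node { // illustrative only
	setField := func(f string, val ir.Node) {
		dst := ir.NewSelectorExpr(base.Pos, ir.ODOT,
			ir.NewIndexExpr(base.Pos, selv, ir.NewInt(i)), typecheck.Lookup(f)) // was: lookup(f)
		init = append(init, typecheck.Stmt(ir.NewAssignStmt(base.Pos, dst, val)))
	}
	setField("c", typecheck.ConvNop(ch, types.Types[types.TUNSAFEPTR]))      // was: convnop(c, ...)
	setField("elem", typecheck.ConvNop(elem, types.Types[types.TUNSAFEPTR])) // was: convnop(elem, ...)
	return init
}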
diff --git a/src/cmd/compile/internal/gc/sinit.go b/src/cmd/compile/internal/gc/sinit.go
index e9a4590043..26591ad5ab 100644
--- a/src/cmd/compile/internal/gc/sinit.go
+++ b/src/cmd/compile/internal/gc/sinit.go
@@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"fmt"
@@ -112,7 +113,7 @@ func (s *InitSchedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *type
if loff != 0 || !types.Identical(typ, l.Type()) {
dst = ir.NewNameOffsetExpr(base.Pos, l, loff, typ)
}
- s.append(ir.NewAssignStmt(base.Pos, dst, conv(r, typ)))
+ s.append(ir.NewAssignStmt(base.Pos, dst, typecheck.Conv(r, typ)))
return true
case ir.ONIL:
@@ -387,9 +388,9 @@ var statuniqgen int // name generator for static temps
// Use readonlystaticname for read-only node.
func staticname(t *types.Type) *ir.Name {
// Don't use lookupN; it interns the resulting string, but these are all unique.
- n := NewName(lookup(fmt.Sprintf("%s%d", obj.StaticNamePref, statuniqgen)))
+ n := typecheck.NewName(typecheck.Lookup(fmt.Sprintf("%s%d", obj.StaticNamePref, statuniqgen)))
statuniqgen++
- declare(n, ir.PEXTERN)
+ typecheck.Declare(n, ir.PEXTERN)
n.SetType(t)
n.Sym().Linksym().Set(obj.AttrLocal, true)
return n
@@ -541,7 +542,7 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.CompLitExpr, var_ ir.Node,
splitnode = func(r ir.Node) (ir.Node, ir.Node) {
if r.Op() == ir.OKEY {
kv := r.(*ir.KeyExpr)
- k = indexconst(kv.Key)
+ k = typecheck.IndexConst(kv.Key)
if k < 0 {
base.Fatalf("fixedlit: invalid index %v", kv.Key)
}
@@ -596,7 +597,7 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.CompLitExpr, var_ ir.Node,
// build list of assignments: var[index] = expr
ir.SetPos(a)
as := ir.NewAssignStmt(base.Pos, a, value)
- as = typecheck(as, ctxStmt).(*ir.AssignStmt)
+ as = typecheck.Stmt(as).(*ir.AssignStmt)
switch kind {
case initKindStatic:
genAsStatic(as)
@@ -632,7 +633,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
fixedlit(ctxt, initKindDynamic, n, vstat, init)
// copy static to slice
- var_ = typecheck(var_, ctxExpr|ctxAssign)
+ var_ = typecheck.AssignExpr(var_)
name, offset, ok := stataddr(var_)
if !ok || name.Class_ != ir.PEXTERN {
base.Fatalf("slicelit: %v", var_)
@@ -675,7 +676,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
}
// make new auto *array (3 declare)
- vauto := temp(types.NewPtr(t))
+ vauto := typecheck.Temp(types.NewPtr(t))
// set auto to point at new temp or heap (3 assign)
var a ir.Node
@@ -687,7 +688,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
if vstat == nil {
a = ir.NewAssignStmt(base.Pos, x, nil)
- a = typecheck(a, ctxStmt)
+ a = typecheck.Stmt(a)
init.Append(a) // zero new temp
} else {
// Declare that we're about to initialize all of x.
@@ -695,19 +696,19 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
init.Append(ir.NewUnaryExpr(base.Pos, ir.OVARDEF, x))
}
- a = nodAddr(x)
+ a = typecheck.NodAddr(x)
} else if n.Esc() == ir.EscNone {
- a = temp(t)
+ a = typecheck.Temp(t)
if vstat == nil {
- a = ir.NewAssignStmt(base.Pos, temp(t), nil)
- a = typecheck(a, ctxStmt)
+ a = ir.NewAssignStmt(base.Pos, typecheck.Temp(t), nil)
+ a = typecheck.Stmt(a)
init.Append(a) // zero new temp
a = a.(*ir.AssignStmt).X
} else {
init.Append(ir.NewUnaryExpr(base.Pos, ir.OVARDEF, a))
}
- a = nodAddr(a)
+ a = typecheck.NodAddr(a)
} else {
a = ir.NewUnaryExpr(base.Pos, ir.ONEW, ir.TypeNode(t))
}
@@ -724,7 +725,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
for _, value := range n.List {
if value.Op() == ir.OKEY {
kv := value.(*ir.KeyExpr)
- index = indexconst(kv.Key)
+ index = typecheck.IndexConst(kv.Key)
if index < 0 {
base.Fatalf("slicelit: invalid index %v", kv.Key)
}
@@ -758,7 +759,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
// build list of vauto[c] = expr
ir.SetPos(value)
- as := typecheck(ir.NewAssignStmt(base.Pos, a, value), ctxStmt)
+ as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, a, value))
as = orderStmtInPlace(as, map[string][]*ir.Name{})
as = walkstmt(as)
init.Append(as)
@@ -767,7 +768,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
// make slice out of heap (6)
a = ir.NewAssignStmt(base.Pos, var_, ir.NewSliceExpr(base.Pos, ir.OSLICE, vauto))
- a = typecheck(a, ctxStmt)
+ a = typecheck.Stmt(a)
a = orderStmtInPlace(a, map[string][]*ir.Name{})
a = walkstmt(a)
init.Append(a)
@@ -822,7 +823,7 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
// for i = 0; i < len(vstatk); i++ {
// map[vstatk[i]] = vstate[i]
// }
- i := temp(types.Types[types.TINT])
+ i := typecheck.Temp(types.Types[types.TINT])
rhs := ir.NewIndexExpr(base.Pos, vstate, i)
rhs.SetBounded(true)
@@ -847,8 +848,8 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
// Build list of var[c] = expr.
// Use temporaries so that mapassign1 can have addressable key, elem.
// TODO(josharian): avoid map key temporaries for mapfast_* assignments with literal keys.
- tmpkey := temp(m.Type().Key())
- tmpelem := temp(m.Type().Elem())
+ tmpkey := typecheck.Temp(m.Type().Key())
+ tmpelem := typecheck.Temp(m.Type().Elem())
for _, r := range entries {
r := r.(*ir.KeyExpr)
@@ -892,7 +893,7 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
if n.Alloc != nil {
// n.Right is stack temporary used as backing store.
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, n.Alloc, nil)) // zero backing store, just in case (#18410)
- r = nodAddr(n.Alloc)
+ r = typecheck.NodAddr(n.Alloc)
} else {
r = ir.NewUnaryExpr(base.Pos, ir.ONEW, ir.TypeNode(n.X.Type()))
r.SetEsc(n.Esc())
@@ -900,7 +901,7 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, r))
var_ = ir.NewStarExpr(base.Pos, var_)
- var_ = typecheck(var_, ctxExpr|ctxAssign)
+ var_ = typecheck.AssignExpr(var_)
anylit(n.X, var_, init)
case ir.OSTRUCTLIT, ir.OARRAYLIT:
@@ -1060,7 +1061,7 @@ func (s *InitSchedule) initplan(n ir.Node) {
for _, a := range n.List {
if a.Op() == ir.OKEY {
kv := a.(*ir.KeyExpr)
- k = indexconst(kv.Key)
+ k = typecheck.IndexConst(kv.Key)
if k < 0 {
base.Fatalf("initplan arraylit: invalid index %v", kv.Key)
}
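
Two renamings carry most of the sinit.go hunks: typecheck(n, ctxExpr|ctxAssign) becomes typecheck.AssignExpr(n), which typechecks an assignment target, and indexconst becomes typecheck.IndexConst, which still reports a negative value when the key is not a usable constant index. A small sketch of the latter, with litIndex invented for illustration:

func litIndex(kv *ir.KeyExpr) int64 { // illustrative only
	k := typecheck.IndexConst(kv.Key) // was: indexconst(kv.Key)
	if k < 0 {
		base.Fatalf("invalid composite-literal index %v", kv.Key)
	}
	return k
}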
diff --git a/src/cmd/compile/internal/gc/ssa.go b/src/cmd/compile/internal/gc/ssa.go
index 21925a0d65..382e4d4320 100644
--- a/src/cmd/compile/internal/gc/ssa.go
+++ b/src/cmd/compile/internal/gc/ssa.go
@@ -19,6 +19,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/obj/x86"
@@ -91,119 +92,119 @@ func initssaconfig() {
ssaCaches = make([]ssa.Cache, base.Flag.LowerC)
// Set up some runtime functions we'll need to call.
- ir.Syms.AssertE2I = sysfunc("assertE2I")
- ir.Syms.AssertE2I2 = sysfunc("assertE2I2")
- ir.Syms.AssertI2I = sysfunc("assertI2I")
- ir.Syms.AssertI2I2 = sysfunc("assertI2I2")
- ir.Syms.Deferproc = sysfunc("deferproc")
- ir.Syms.DeferprocStack = sysfunc("deferprocStack")
- ir.Syms.Deferreturn = sysfunc("deferreturn")
- ir.Syms.Duffcopy = sysfunc("duffcopy")
- ir.Syms.Duffzero = sysfunc("duffzero")
- ir.Syms.GCWriteBarrier = sysfunc("gcWriteBarrier")
- ir.Syms.Goschedguarded = sysfunc("goschedguarded")
- ir.Syms.Growslice = sysfunc("growslice")
- ir.Syms.Msanread = sysfunc("msanread")
- ir.Syms.Msanwrite = sysfunc("msanwrite")
- ir.Syms.Msanmove = sysfunc("msanmove")
- ir.Syms.Newobject = sysfunc("newobject")
- ir.Syms.Newproc = sysfunc("newproc")
- ir.Syms.Panicdivide = sysfunc("panicdivide")
- ir.Syms.PanicdottypeE = sysfunc("panicdottypeE")
- ir.Syms.PanicdottypeI = sysfunc("panicdottypeI")
- ir.Syms.Panicnildottype = sysfunc("panicnildottype")
- ir.Syms.Panicoverflow = sysfunc("panicoverflow")
- ir.Syms.Panicshift = sysfunc("panicshift")
- ir.Syms.Raceread = sysfunc("raceread")
- ir.Syms.Racereadrange = sysfunc("racereadrange")
- ir.Syms.Racewrite = sysfunc("racewrite")
- ir.Syms.Racewriterange = sysfunc("racewriterange")
- ir.Syms.X86HasPOPCNT = sysvar("x86HasPOPCNT") // bool
- ir.Syms.X86HasSSE41 = sysvar("x86HasSSE41") // bool
- ir.Syms.X86HasFMA = sysvar("x86HasFMA") // bool
- ir.Syms.ARMHasVFPv4 = sysvar("armHasVFPv4") // bool
- ir.Syms.ARM64HasATOMICS = sysvar("arm64HasATOMICS") // bool
- ir.Syms.Typedmemclr = sysfunc("typedmemclr")
- ir.Syms.Typedmemmove = sysfunc("typedmemmove")
- ir.Syms.Udiv = sysvar("udiv") // asm func with special ABI
- ir.Syms.WriteBarrier = sysvar("writeBarrier") // struct { bool; ... }
- ir.Syms.Zerobase = sysvar("zerobase")
+ ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
+ ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
+ ir.Syms.AssertI2I = typecheck.LookupRuntimeFunc("assertI2I")
+ ir.Syms.AssertI2I2 = typecheck.LookupRuntimeFunc("assertI2I2")
+ ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
+ ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
+ ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
+ ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
+ ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
+ ir.Syms.GCWriteBarrier = typecheck.LookupRuntimeFunc("gcWriteBarrier")
+ ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
+ ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
+ ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
+ ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
+ ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
+ ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
+ ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
+ ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
+ ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
+ ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
+ ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
+ ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
+ ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
+ ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
+ ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
+ ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
+ ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
+ ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT") // bool
+ ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41") // bool
+ ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA") // bool
+ ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4") // bool
+ ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS") // bool
+ ir.Syms.Typedmemclr = typecheck.LookupRuntimeFunc("typedmemclr")
+ ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
+ ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv") // asm func with special ABI
+ ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier") // struct { bool; ... }
+ ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")
// asm funcs with special ABI
if thearch.LinkArch.Name == "amd64" {
GCWriteBarrierReg = map[int16]*obj.LSym{
- x86.REG_AX: sysfunc("gcWriteBarrier"),
- x86.REG_CX: sysfunc("gcWriteBarrierCX"),
- x86.REG_DX: sysfunc("gcWriteBarrierDX"),
- x86.REG_BX: sysfunc("gcWriteBarrierBX"),
- x86.REG_BP: sysfunc("gcWriteBarrierBP"),
- x86.REG_SI: sysfunc("gcWriteBarrierSI"),
- x86.REG_R8: sysfunc("gcWriteBarrierR8"),
- x86.REG_R9: sysfunc("gcWriteBarrierR9"),
+ x86.REG_AX: typecheck.LookupRuntimeFunc("gcWriteBarrier"),
+ x86.REG_CX: typecheck.LookupRuntimeFunc("gcWriteBarrierCX"),
+ x86.REG_DX: typecheck.LookupRuntimeFunc("gcWriteBarrierDX"),
+ x86.REG_BX: typecheck.LookupRuntimeFunc("gcWriteBarrierBX"),
+ x86.REG_BP: typecheck.LookupRuntimeFunc("gcWriteBarrierBP"),
+ x86.REG_SI: typecheck.LookupRuntimeFunc("gcWriteBarrierSI"),
+ x86.REG_R8: typecheck.LookupRuntimeFunc("gcWriteBarrierR8"),
+ x86.REG_R9: typecheck.LookupRuntimeFunc("gcWriteBarrierR9"),
}
}
if thearch.LinkArch.Family == sys.Wasm {
- BoundsCheckFunc[ssa.BoundsIndex] = sysfunc("goPanicIndex")
- BoundsCheckFunc[ssa.BoundsIndexU] = sysfunc("goPanicIndexU")
- BoundsCheckFunc[ssa.BoundsSliceAlen] = sysfunc("goPanicSliceAlen")
- BoundsCheckFunc[ssa.BoundsSliceAlenU] = sysfunc("goPanicSliceAlenU")
- BoundsCheckFunc[ssa.BoundsSliceAcap] = sysfunc("goPanicSliceAcap")
- BoundsCheckFunc[ssa.BoundsSliceAcapU] = sysfunc("goPanicSliceAcapU")
- BoundsCheckFunc[ssa.BoundsSliceB] = sysfunc("goPanicSliceB")
- BoundsCheckFunc[ssa.BoundsSliceBU] = sysfunc("goPanicSliceBU")
- BoundsCheckFunc[ssa.BoundsSlice3Alen] = sysfunc("goPanicSlice3Alen")
- BoundsCheckFunc[ssa.BoundsSlice3AlenU] = sysfunc("goPanicSlice3AlenU")
- BoundsCheckFunc[ssa.BoundsSlice3Acap] = sysfunc("goPanicSlice3Acap")
- BoundsCheckFunc[ssa.BoundsSlice3AcapU] = sysfunc("goPanicSlice3AcapU")
- BoundsCheckFunc[ssa.BoundsSlice3B] = sysfunc("goPanicSlice3B")
- BoundsCheckFunc[ssa.BoundsSlice3BU] = sysfunc("goPanicSlice3BU")
- BoundsCheckFunc[ssa.BoundsSlice3C] = sysfunc("goPanicSlice3C")
- BoundsCheckFunc[ssa.BoundsSlice3CU] = sysfunc("goPanicSlice3CU")
+ BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
+ BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
+ BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
+ BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
+ BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
+ BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
+ BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
+ BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
+ BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
+ BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
+ BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
+ BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
+ BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
+ BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
+ BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
+ BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
} else {
- BoundsCheckFunc[ssa.BoundsIndex] = sysfunc("panicIndex")
- BoundsCheckFunc[ssa.BoundsIndexU] = sysfunc("panicIndexU")
- BoundsCheckFunc[ssa.BoundsSliceAlen] = sysfunc("panicSliceAlen")
- BoundsCheckFunc[ssa.BoundsSliceAlenU] = sysfunc("panicSliceAlenU")
- BoundsCheckFunc[ssa.BoundsSliceAcap] = sysfunc("panicSliceAcap")
- BoundsCheckFunc[ssa.BoundsSliceAcapU] = sysfunc("panicSliceAcapU")
- BoundsCheckFunc[ssa.BoundsSliceB] = sysfunc("panicSliceB")
- BoundsCheckFunc[ssa.BoundsSliceBU] = sysfunc("panicSliceBU")
- BoundsCheckFunc[ssa.BoundsSlice3Alen] = sysfunc("panicSlice3Alen")
- BoundsCheckFunc[ssa.BoundsSlice3AlenU] = sysfunc("panicSlice3AlenU")
- BoundsCheckFunc[ssa.BoundsSlice3Acap] = sysfunc("panicSlice3Acap")
- BoundsCheckFunc[ssa.BoundsSlice3AcapU] = sysfunc("panicSlice3AcapU")
- BoundsCheckFunc[ssa.BoundsSlice3B] = sysfunc("panicSlice3B")
- BoundsCheckFunc[ssa.BoundsSlice3BU] = sysfunc("panicSlice3BU")
- BoundsCheckFunc[ssa.BoundsSlice3C] = sysfunc("panicSlice3C")
- BoundsCheckFunc[ssa.BoundsSlice3CU] = sysfunc("panicSlice3CU")
+ BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("panicIndex")
+ BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("panicIndexU")
+ BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("panicSliceAlen")
+ BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("panicSliceAlenU")
+ BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("panicSliceAcap")
+ BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("panicSliceAcapU")
+ BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("panicSliceB")
+ BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("panicSliceBU")
+ BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("panicSlice3Alen")
+ BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("panicSlice3AlenU")
+ BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("panicSlice3Acap")
+ BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("panicSlice3AcapU")
+ BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("panicSlice3B")
+ BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("panicSlice3BU")
+ BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("panicSlice3C")
+ BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("panicSlice3CU")
}
if thearch.LinkArch.PtrSize == 4 {
- ExtendCheckFunc[ssa.BoundsIndex] = sysvar("panicExtendIndex")
- ExtendCheckFunc[ssa.BoundsIndexU] = sysvar("panicExtendIndexU")
- ExtendCheckFunc[ssa.BoundsSliceAlen] = sysvar("panicExtendSliceAlen")
- ExtendCheckFunc[ssa.BoundsSliceAlenU] = sysvar("panicExtendSliceAlenU")
- ExtendCheckFunc[ssa.BoundsSliceAcap] = sysvar("panicExtendSliceAcap")
- ExtendCheckFunc[ssa.BoundsSliceAcapU] = sysvar("panicExtendSliceAcapU")
- ExtendCheckFunc[ssa.BoundsSliceB] = sysvar("panicExtendSliceB")
- ExtendCheckFunc[ssa.BoundsSliceBU] = sysvar("panicExtendSliceBU")
- ExtendCheckFunc[ssa.BoundsSlice3Alen] = sysvar("panicExtendSlice3Alen")
- ExtendCheckFunc[ssa.BoundsSlice3AlenU] = sysvar("panicExtendSlice3AlenU")
- ExtendCheckFunc[ssa.BoundsSlice3Acap] = sysvar("panicExtendSlice3Acap")
- ExtendCheckFunc[ssa.BoundsSlice3AcapU] = sysvar("panicExtendSlice3AcapU")
- ExtendCheckFunc[ssa.BoundsSlice3B] = sysvar("panicExtendSlice3B")
- ExtendCheckFunc[ssa.BoundsSlice3BU] = sysvar("panicExtendSlice3BU")
- ExtendCheckFunc[ssa.BoundsSlice3C] = sysvar("panicExtendSlice3C")
- ExtendCheckFunc[ssa.BoundsSlice3CU] = sysvar("panicExtendSlice3CU")
+ ExtendCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeVar("panicExtendIndex")
+ ExtendCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeVar("panicExtendIndexU")
+ ExtendCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeVar("panicExtendSliceAlen")
+ ExtendCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeVar("panicExtendSliceAlenU")
+ ExtendCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeVar("panicExtendSliceAcap")
+ ExtendCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeVar("panicExtendSliceAcapU")
+ ExtendCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeVar("panicExtendSliceB")
+ ExtendCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeVar("panicExtendSliceBU")
+ ExtendCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeVar("panicExtendSlice3Alen")
+ ExtendCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeVar("panicExtendSlice3AlenU")
+ ExtendCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeVar("panicExtendSlice3Acap")
+ ExtendCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeVar("panicExtendSlice3AcapU")
+ ExtendCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeVar("panicExtendSlice3B")
+ ExtendCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeVar("panicExtendSlice3BU")
+ ExtendCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeVar("panicExtendSlice3C")
+ ExtendCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeVar("panicExtendSlice3CU")
}
// Wasm (all asm funcs with special ABIs)
- ir.Syms.WasmMove = sysvar("wasmMove")
- ir.Syms.WasmZero = sysvar("wasmZero")
- ir.Syms.WasmDiv = sysvar("wasmDiv")
- ir.Syms.WasmTruncS = sysvar("wasmTruncS")
- ir.Syms.WasmTruncU = sysvar("wasmTruncU")
- ir.Syms.SigPanic = sysfunc("sigpanic")
+ ir.Syms.WasmMove = typecheck.LookupRuntimeVar("wasmMove")
+ ir.Syms.WasmZero = typecheck.LookupRuntimeVar("wasmZero")
+ ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
+ ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
+ ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
+ ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
// getParam returns the Field of ith param of node n (which is a
@@ -418,7 +419,7 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func {
// Create the deferBits variable and stack slot. deferBits is a
// bitmask showing which of the open-coded defers in this function
// have been activated.
- deferBitsTemp := tempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
+ deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
s.deferBitsTemp = deferBitsTemp
// For this value, AuxInt is initialized to zero by default
startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
@@ -710,7 +711,7 @@ func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl
func (s *state) Debug_checknil() bool { return s.f.Frontend().Debug_checknil() }
func ssaMarker(name string) *ir.Name {
- return NewName(&types.Sym{Name: name})
+ return typecheck.NewName(&types.Sym{Name: name})
}
var (
@@ -3342,38 +3343,38 @@ var softFloatOps map[ssa.Op]sfRtCallDef
func softfloatInit() {
// Some of these operations get transformed by sfcall.
softFloatOps = map[ssa.Op]sfRtCallDef{
- ssa.OpAdd32F: sfRtCallDef{sysfunc("fadd32"), types.TFLOAT32},
- ssa.OpAdd64F: sfRtCallDef{sysfunc("fadd64"), types.TFLOAT64},
- ssa.OpSub32F: sfRtCallDef{sysfunc("fadd32"), types.TFLOAT32},
- ssa.OpSub64F: sfRtCallDef{sysfunc("fadd64"), types.TFLOAT64},
- ssa.OpMul32F: sfRtCallDef{sysfunc("fmul32"), types.TFLOAT32},
- ssa.OpMul64F: sfRtCallDef{sysfunc("fmul64"), types.TFLOAT64},
- ssa.OpDiv32F: sfRtCallDef{sysfunc("fdiv32"), types.TFLOAT32},
- ssa.OpDiv64F: sfRtCallDef{sysfunc("fdiv64"), types.TFLOAT64},
-
- ssa.OpEq64F: sfRtCallDef{sysfunc("feq64"), types.TBOOL},
- ssa.OpEq32F: sfRtCallDef{sysfunc("feq32"), types.TBOOL},
- ssa.OpNeq64F: sfRtCallDef{sysfunc("feq64"), types.TBOOL},
- ssa.OpNeq32F: sfRtCallDef{sysfunc("feq32"), types.TBOOL},
- ssa.OpLess64F: sfRtCallDef{sysfunc("fgt64"), types.TBOOL},
- ssa.OpLess32F: sfRtCallDef{sysfunc("fgt32"), types.TBOOL},
- ssa.OpLeq64F: sfRtCallDef{sysfunc("fge64"), types.TBOOL},
- ssa.OpLeq32F: sfRtCallDef{sysfunc("fge32"), types.TBOOL},
-
- ssa.OpCvt32to32F: sfRtCallDef{sysfunc("fint32to32"), types.TFLOAT32},
- ssa.OpCvt32Fto32: sfRtCallDef{sysfunc("f32toint32"), types.TINT32},
- ssa.OpCvt64to32F: sfRtCallDef{sysfunc("fint64to32"), types.TFLOAT32},
- ssa.OpCvt32Fto64: sfRtCallDef{sysfunc("f32toint64"), types.TINT64},
- ssa.OpCvt64Uto32F: sfRtCallDef{sysfunc("fuint64to32"), types.TFLOAT32},
- ssa.OpCvt32Fto64U: sfRtCallDef{sysfunc("f32touint64"), types.TUINT64},
- ssa.OpCvt32to64F: sfRtCallDef{sysfunc("fint32to64"), types.TFLOAT64},
- ssa.OpCvt64Fto32: sfRtCallDef{sysfunc("f64toint32"), types.TINT32},
- ssa.OpCvt64to64F: sfRtCallDef{sysfunc("fint64to64"), types.TFLOAT64},
- ssa.OpCvt64Fto64: sfRtCallDef{sysfunc("f64toint64"), types.TINT64},
- ssa.OpCvt64Uto64F: sfRtCallDef{sysfunc("fuint64to64"), types.TFLOAT64},
- ssa.OpCvt64Fto64U: sfRtCallDef{sysfunc("f64touint64"), types.TUINT64},
- ssa.OpCvt32Fto64F: sfRtCallDef{sysfunc("f32to64"), types.TFLOAT64},
- ssa.OpCvt64Fto32F: sfRtCallDef{sysfunc("f64to32"), types.TFLOAT32},
+ ssa.OpAdd32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
+ ssa.OpAdd64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
+ ssa.OpSub32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
+ ssa.OpSub64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
+ ssa.OpMul32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
+ ssa.OpMul64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
+ ssa.OpDiv32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
+ ssa.OpDiv64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},
+
+ ssa.OpEq64F: sfRtCallDef{typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
+ ssa.OpEq32F: sfRtCallDef{typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
+ ssa.OpNeq64F: sfRtCallDef{typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
+ ssa.OpNeq32F: sfRtCallDef{typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
+ ssa.OpLess64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
+ ssa.OpLess32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
+ ssa.OpLeq64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
+ ssa.OpLeq32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},
+
+ ssa.OpCvt32to32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
+ ssa.OpCvt32Fto32: sfRtCallDef{typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
+ ssa.OpCvt64to32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
+ ssa.OpCvt32Fto64: sfRtCallDef{typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
+ ssa.OpCvt64Uto32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
+ ssa.OpCvt32Fto64U: sfRtCallDef{typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
+ ssa.OpCvt32to64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
+ ssa.OpCvt64Fto32: sfRtCallDef{typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
+ ssa.OpCvt64to64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
+ ssa.OpCvt64Fto64: sfRtCallDef{typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
+ ssa.OpCvt64Uto64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
+ ssa.OpCvt64Fto64U: sfRtCallDef{typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
+ ssa.OpCvt32Fto64F: sfRtCallDef{typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
+ ssa.OpCvt64Fto32F: sfRtCallDef{typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
}
}
@@ -4458,7 +4459,7 @@ func (s *state) openDeferSave(n ir.Node, t *types.Type, val *ssa.Value) *ssa.Val
} else {
pos = n.Pos()
}
- argTemp := tempAt(pos.WithNotStmt(), s.curfn, t)
+ argTemp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
argTemp.SetOpenDeferSlot(true)
var addrArgTemp *ssa.Value
// Use OpVarLive to make sure stack slots for the args, etc. are not
@@ -4719,7 +4720,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
testLateExpansion = ssa.LateCallExpansionEnabledWithin(s.f)
// Make a defer struct d on the stack.
t := deferstruct(stksize)
- d := tempAt(n.Pos(), s.curfn, t)
+ d := typecheck.TempAt(n.Pos(), s.curfn, t)
s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, d, s.mem())
addr := s.addr(d)
@@ -6144,7 +6145,7 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
if commaok && !canSSAType(n.Type()) {
// unSSAable type, use temporary.
// TODO: get rid of some of these temporaries.
- tmp = tempAt(n.Pos(), s.curfn, n.Type())
+ tmp = typecheck.TempAt(n.Pos(), s.curfn, n.Type())
s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp.(*ir.Name), s.mem())
addr = s.addr(tmp)
}
@@ -7173,7 +7174,7 @@ func (e *ssafn) StringData(s string) *obj.LSym {
}
func (e *ssafn) Auto(pos src.XPos, t *types.Type) *ir.Name {
- return tempAt(pos, e.curfn, t) // Note: adds new auto to e.curfn.Func.Dcl list
+ return typecheck.TempAt(pos, e.curfn, t) // Note: adds new auto to e.curfn.Func.Dcl list
}
func (e *ssafn) SplitString(name ssa.LocalSlot) (ssa.LocalSlot, ssa.LocalSlot) {
diff --git a/src/cmd/compile/internal/gc/subr.go b/src/cmd/compile/internal/gc/subr.go
index d4c7c6db1a..8e2093d488 100644
--- a/src/cmd/compile/internal/gc/subr.go
+++ b/src/cmd/compile/internal/gc/subr.go
@@ -7,11 +7,10 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
- "sort"
- "strconv"
"strings"
"sync"
"unicode"
@@ -31,71 +30,35 @@ var (
largeStackFrames []largeStack
)
-func lookup(name string) *types.Sym {
- return types.LocalPkg.Lookup(name)
-}
-
-// lookupN looks up the symbol starting with prefix and ending with
-// the decimal n. If prefix is too long, lookupN panics.
-func lookupN(prefix string, n int) *types.Sym {
- var buf [20]byte // plenty long enough for all current users
- copy(buf[:], prefix)
- b := strconv.AppendInt(buf[:len(prefix)], int64(n), 10)
- return types.LocalPkg.LookupBytes(b)
-}
-
-// autolabel generates a new Name node for use with
-// an automatically generated label.
-// prefix is a short mnemonic (e.g. ".s" for switch)
-// to help with debugging.
-// It should begin with "." to avoid conflicts with
-// user labels.
-func autolabel(prefix string) *types.Sym {
- if prefix[0] != '.' {
- base.Fatalf("autolabel prefix must start with '.', have %q", prefix)
- }
- fn := ir.CurFunc
- if ir.CurFunc == nil {
- base.Fatalf("autolabel outside function")
- }
- n := fn.Label
- fn.Label++
- return lookupN(prefix, int(n))
-}
-
// dotImports tracks all PkgNames that have been dot-imported.
var dotImports []*ir.PkgName
-// dotImportRefs maps idents introduced by importDot back to the
-// ir.PkgName they were dot-imported through.
-var dotImportRefs map[*ir.Ident]*ir.PkgName
-
// find all the exported symbols in package referenced by PkgName,
// and make them available in the current package
func importDot(pack *ir.PkgName) {
- if dotImportRefs == nil {
- dotImportRefs = make(map[*ir.Ident]*ir.PkgName)
+ if typecheck.DotImportRefs == nil {
+ typecheck.DotImportRefs = make(map[*ir.Ident]*ir.PkgName)
}
opkg := pack.Pkg
for _, s := range opkg.Syms {
if s.Def == nil {
- if _, ok := declImporter[s]; !ok {
+ if _, ok := typecheck.DeclImporter[s]; !ok {
continue
}
}
if !types.IsExported(s.Name) || strings.ContainsRune(s.Name, 0xb7) { // 0xb7 = center dot
continue
}
- s1 := lookup(s.Name)
+ s1 := typecheck.Lookup(s.Name)
if s1.Def != nil {
pkgerror := fmt.Sprintf("during import %q", opkg.Path)
- redeclare(base.Pos, s1, pkgerror)
+ typecheck.Redeclared(base.Pos, s1, pkgerror)
continue
}
id := ir.NewIdent(src.NoXPos, s)
- dotImportRefs[id] = pack
+ typecheck.DotImportRefs[id] = pack
s1.Def = id
s1.Block = 1
}
@@ -113,347 +76,7 @@ func checkDotImports() {
// No longer needed; release memory.
dotImports = nil
- dotImportRefs = nil
-}
-
-// nodAddr returns a node representing &n at base.Pos.
-func nodAddr(n ir.Node) *ir.AddrExpr {
- return nodAddrAt(base.Pos, n)
-}
-
-// nodAddrPos returns a node representing &n at position pos.
-func nodAddrAt(pos src.XPos, n ir.Node) *ir.AddrExpr {
- return ir.NewAddrExpr(pos, n)
-}
-
-// newname returns a new ONAME Node associated with symbol s.
-func NewName(s *types.Sym) *ir.Name {
- n := ir.NewNameAt(base.Pos, s)
- n.Curfn = ir.CurFunc
- return n
-}
-
-func nodnil() ir.Node {
- n := ir.NewNilExpr(base.Pos)
- n.SetType(types.Types[types.TNIL])
- return n
-}
-
-func isptrto(t *types.Type, et types.Kind) bool {
- if t == nil {
- return false
- }
- if !t.IsPtr() {
- return false
- }
- t = t.Elem()
- if t == nil {
- return false
- }
- if t.Kind() != et {
- return false
- }
- return true
-}
-
-// Is type src assignment compatible to type dst?
-// If so, return op code to use in conversion.
-// If not, return OXXX. In this case, the string return parameter may
-// hold a reason why. In all other cases, it'll be the empty string.
-func assignop(src, dst *types.Type) (ir.Op, string) {
- if src == dst {
- return ir.OCONVNOP, ""
- }
- if src == nil || dst == nil || src.Kind() == types.TFORW || dst.Kind() == types.TFORW || src.Underlying() == nil || dst.Underlying() == nil {
- return ir.OXXX, ""
- }
-
- // 1. src type is identical to dst.
- if types.Identical(src, dst) {
- return ir.OCONVNOP, ""
- }
-
- // 2. src and dst have identical underlying types
- // and either src or dst is not a named type or
- // both are empty interface types.
- // For assignable but different non-empty interface types,
- // we want to recompute the itab. Recomputing the itab ensures
- // that itabs are unique (thus an interface with a compile-time
- // type I has an itab with interface type I).
- if types.Identical(src.Underlying(), dst.Underlying()) {
- if src.IsEmptyInterface() {
- // Conversion between two empty interfaces
- // requires no code.
- return ir.OCONVNOP, ""
- }
- if (src.Sym() == nil || dst.Sym() == nil) && !src.IsInterface() {
- // Conversion between two types, at least one unnamed,
- // needs no conversion. The exception is nonempty interfaces
- // which need to have their itab updated.
- return ir.OCONVNOP, ""
- }
- }
-
- // 3. dst is an interface type and src implements dst.
- if dst.IsInterface() && src.Kind() != types.TNIL {
- var missing, have *types.Field
- var ptr int
- if implements(src, dst, &missing, &have, &ptr) {
- // Call itabname so that (src, dst)
- // gets added to itabs early, which allows
- // us to de-virtualize calls through this
- // type/interface pair later. See peekitabs in reflect.go
- if types.IsDirectIface(src) && !dst.IsEmptyInterface() {
- NeedITab(src, dst)
- }
-
- return ir.OCONVIFACE, ""
- }
-
- // we'll have complained about this method anyway, suppress spurious messages.
- if have != nil && have.Sym == missing.Sym && (have.Type.Broke() || missing.Type.Broke()) {
- return ir.OCONVIFACE, ""
- }
-
- var why string
- if isptrto(src, types.TINTER) {
- why = fmt.Sprintf(":\n\t%v is pointer to interface, not interface", src)
- } else if have != nil && have.Sym == missing.Sym && have.Nointerface() {
- why = fmt.Sprintf(":\n\t%v does not implement %v (%v method is marked 'nointerface')", src, dst, missing.Sym)
- } else if have != nil && have.Sym == missing.Sym {
- why = fmt.Sprintf(":\n\t%v does not implement %v (wrong type for %v method)\n"+
- "\t\thave %v%S\n\t\twant %v%S", src, dst, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
- } else if ptr != 0 {
- why = fmt.Sprintf(":\n\t%v does not implement %v (%v method has pointer receiver)", src, dst, missing.Sym)
- } else if have != nil {
- why = fmt.Sprintf(":\n\t%v does not implement %v (missing %v method)\n"+
- "\t\thave %v%S\n\t\twant %v%S", src, dst, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
- } else {
- why = fmt.Sprintf(":\n\t%v does not implement %v (missing %v method)", src, dst, missing.Sym)
- }
-
- return ir.OXXX, why
- }
-
- if isptrto(dst, types.TINTER) {
- why := fmt.Sprintf(":\n\t%v is pointer to interface, not interface", dst)
- return ir.OXXX, why
- }
-
- if src.IsInterface() && dst.Kind() != types.TBLANK {
- var missing, have *types.Field
- var ptr int
- var why string
- if implements(dst, src, &missing, &have, &ptr) {
- why = ": need type assertion"
- }
- return ir.OXXX, why
- }
-
- // 4. src is a bidirectional channel value, dst is a channel type,
- // src and dst have identical element types, and
- // either src or dst is not a named type.
- if src.IsChan() && src.ChanDir() == types.Cboth && dst.IsChan() {
- if types.Identical(src.Elem(), dst.Elem()) && (src.Sym() == nil || dst.Sym() == nil) {
- return ir.OCONVNOP, ""
- }
- }
-
- // 5. src is the predeclared identifier nil and dst is a nillable type.
- if src.Kind() == types.TNIL {
- switch dst.Kind() {
- case types.TPTR,
- types.TFUNC,
- types.TMAP,
- types.TCHAN,
- types.TINTER,
- types.TSLICE:
- return ir.OCONVNOP, ""
- }
- }
-
- // 6. rule about untyped constants - already converted by defaultlit.
-
- // 7. Any typed value can be assigned to the blank identifier.
- if dst.Kind() == types.TBLANK {
- return ir.OCONVNOP, ""
- }
-
- return ir.OXXX, ""
-}
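
The numbered cases in assignop mirror Go's assignability rules. A small self-contained example (illustration only, not compiler code) that exercises several of them at the source level:

package main

import (
	"fmt"
	"io"
	"os"
)

type IntSlice []int // named type whose underlying type []int is unnamed

func main() {
	// Rule 2: identical underlying types and one side unnamed.
	var s []int
	var is IntSlice = s
	_ = is

	// Rule 3: dst is an interface and src implements it (OCONVIFACE).
	var w io.Writer = os.Stdout
	_ = w

	// Rule 4: bidirectional channel assigned to a directional channel type.
	c := make(chan int)
	var r <-chan int = c
	_ = r

	// Rule 5: the predeclared nil assigned to a nillable type.
	var p *int = nil
	_ = p

	// Rule 7: any typed value can be assigned to the blank identifier.
	_ = 42

	fmt.Println("all assignments compile")
}
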
-
-// Can we convert a value of type src to a value of type dst?
-// If so, return op code to use in conversion (maybe OCONVNOP).
-// If not, return OXXX. In this case, the string return parameter may
-// hold a reason why. In all other cases, it'll be the empty string.
-// srcConstant indicates whether the value of type src is a constant.
-func convertop(srcConstant bool, src, dst *types.Type) (ir.Op, string) {
- if src == dst {
- return ir.OCONVNOP, ""
- }
- if src == nil || dst == nil {
- return ir.OXXX, ""
- }
-
- // Conversions from regular to go:notinheap are not allowed
- // (unless it's unsafe.Pointer). These are runtime-specific
- // rules.
- // (a) Disallow (*T) to (*U) where T is go:notinheap but U isn't.
- if src.IsPtr() && dst.IsPtr() && dst.Elem().NotInHeap() && !src.Elem().NotInHeap() {
- why := fmt.Sprintf(":\n\t%v is incomplete (or unallocatable), but %v is not", dst.Elem(), src.Elem())
- return ir.OXXX, why
- }
- // (b) Disallow string to []T where T is go:notinheap.
- if src.IsString() && dst.IsSlice() && dst.Elem().NotInHeap() && (dst.Elem().Kind() == types.ByteType.Kind() || dst.Elem().Kind() == types.RuneType.Kind()) {
- why := fmt.Sprintf(":\n\t%v is incomplete (or unallocatable)", dst.Elem())
- return ir.OXXX, why
- }
-
- // 1. src can be assigned to dst.
- op, why := assignop(src, dst)
- if op != ir.OXXX {
- return op, why
- }
-
- // The rules for interfaces are no different in conversions
- // than assignments. If interfaces are involved, stop now
- // with the good message from assignop.
- // Otherwise clear the error.
- if src.IsInterface() || dst.IsInterface() {
- return ir.OXXX, why
- }
-
- // 2. Ignoring struct tags, src and dst have identical underlying types.
- if types.IdenticalIgnoreTags(src.Underlying(), dst.Underlying()) {
- return ir.OCONVNOP, ""
- }
-
- // 3. src and dst are unnamed pointer types and, ignoring struct tags,
- // their base types have identical underlying types.
- if src.IsPtr() && dst.IsPtr() && src.Sym() == nil && dst.Sym() == nil {
- if types.IdenticalIgnoreTags(src.Elem().Underlying(), dst.Elem().Underlying()) {
- return ir.OCONVNOP, ""
- }
- }
-
- // 4. src and dst are both integer or floating point types.
- if (src.IsInteger() || src.IsFloat()) && (dst.IsInteger() || dst.IsFloat()) {
- if types.SimType[src.Kind()] == types.SimType[dst.Kind()] {
- return ir.OCONVNOP, ""
- }
- return ir.OCONV, ""
- }
-
- // 5. src and dst are both complex types.
- if src.IsComplex() && dst.IsComplex() {
- if types.SimType[src.Kind()] == types.SimType[dst.Kind()] {
- return ir.OCONVNOP, ""
- }
- return ir.OCONV, ""
- }
-
- // Special case for constant conversions: any numeric
- // conversion is potentially okay. We'll validate further
- // within evconst. See #38117.
- if srcConstant && (src.IsInteger() || src.IsFloat() || src.IsComplex()) && (dst.IsInteger() || dst.IsFloat() || dst.IsComplex()) {
- return ir.OCONV, ""
- }
-
- // 6. src is an integer or has type []byte or []rune
- // and dst is a string type.
- if src.IsInteger() && dst.IsString() {
- return ir.ORUNESTR, ""
- }
-
- if src.IsSlice() && dst.IsString() {
- if src.Elem().Kind() == types.ByteType.Kind() {
- return ir.OBYTES2STR, ""
- }
- if src.Elem().Kind() == types.RuneType.Kind() {
- return ir.ORUNES2STR, ""
- }
- }
-
- // 7. src is a string and dst is []byte or []rune.
- // String to slice.
- if src.IsString() && dst.IsSlice() {
- if dst.Elem().Kind() == types.ByteType.Kind() {
- return ir.OSTR2BYTES, ""
- }
- if dst.Elem().Kind() == types.RuneType.Kind() {
- return ir.OSTR2RUNES, ""
- }
- }
-
- // 8. src is a pointer or uintptr and dst is unsafe.Pointer.
- if (src.IsPtr() || src.IsUintptr()) && dst.IsUnsafePtr() {
- return ir.OCONVNOP, ""
- }
-
- // 9. src is unsafe.Pointer and dst is a pointer or uintptr.
- if src.IsUnsafePtr() && (dst.IsPtr() || dst.IsUintptr()) {
- return ir.OCONVNOP, ""
- }
-
- // src is map and dst is a pointer to corresponding hmap.
- // This rule is needed for the implementation detail that
- // go gc maps are implemented as a pointer to a hmap struct.
- if src.Kind() == types.TMAP && dst.IsPtr() &&
- src.MapType().Hmap == dst.Elem() {
- return ir.OCONVNOP, ""
- }
-
- return ir.OXXX, ""
-}
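
convertop follows the language's conversion rules in the same numbered style. A short source-level illustration of rules 4 and 6-9 (numeric conversions, string/byte/rune conversions, and unsafe.Pointer round-trips):

package main

import (
	"fmt"
	"unsafe"
)

func main() {
	// Rule 4: integer and floating-point types convert to each other (OCONV).
	i := 65
	f := float64(i)

	// Rule 6: integer -> string (ORUNESTR) and []byte -> string (OBYTES2STR).
	r := string(rune(i)) // "A"
	b := []byte{'g', 'o'}
	s := string(b)

	// Rule 7: string -> []rune (OSTR2RUNES).
	rs := []rune(s)

	// Rules 8 and 9: pointer <-> unsafe.Pointer (OCONVNOP in both directions).
	p := &i
	up := unsafe.Pointer(p)
	back := (*int)(up)

	fmt.Println(f, r, s, rs, *back)
}
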
-
-func assignconv(n ir.Node, t *types.Type, context string) ir.Node {
- return assignconvfn(n, t, func() string { return context })
-}
-
-// Convert node n for assignment to type t.
-func assignconvfn(n ir.Node, t *types.Type, context func() string) ir.Node {
- if n == nil || n.Type() == nil || n.Type().Broke() {
- return n
- }
-
- if t.Kind() == types.TBLANK && n.Type().Kind() == types.TNIL {
- base.Errorf("use of untyped nil")
- }
-
- n = convlit1(n, t, false, context)
- if n.Type() == nil {
- return n
- }
- if t.Kind() == types.TBLANK {
- return n
- }
-
- // Convert ideal bool from comparison to plain bool
- // if the next step is non-bool (like interface{}).
- if n.Type() == types.UntypedBool && !t.IsBoolean() {
- if n.Op() == ir.ONAME || n.Op() == ir.OLITERAL {
- r := ir.NewConvExpr(base.Pos, ir.OCONVNOP, nil, n)
- r.SetType(types.Types[types.TBOOL])
- r.SetTypecheck(1)
- r.SetImplicit(true)
- n = r
- }
- }
-
- if types.Identical(n.Type(), t) {
- return n
- }
-
- op, why := assignop(n.Type(), t)
- if op == ir.OXXX {
- base.Errorf("cannot use %L as type %v in %s%s", n, t, context(), why)
- op = ir.OCONV
- }
-
- r := ir.NewConvExpr(base.Pos, op, t, n)
- r.SetTypecheck(1)
- r.SetImplicit(true)
- return r
+ typecheck.DotImportRefs = nil
}
// backingArrayPtrLen extracts the pointer and length from a slice or string.
@@ -475,14 +98,6 @@ func backingArrayPtrLen(n ir.Node) (ptr, length ir.Node) {
return ptr, length
}
-func syslook(name string) *ir.Name {
- s := ir.Pkgs.Runtime.Lookup(name)
- if s == nil || s.Def == nil {
- base.Fatalf("syslook: can't find runtime.%s", name)
- }
- return ir.AsNode(s.Def).(*ir.Name)
-}
-
// updateHasCall checks whether expression n contains any function
// calls and sets the n.HasCall flag if so.
func updateHasCall(n ir.Node) {
@@ -689,7 +304,7 @@ func safeexpr(n ir.Node, init *ir.Nodes) ir.Node {
}
a := ir.Copy(n).(*ir.UnaryExpr)
a.X = l
- return walkexpr(typecheck(a, ctxExpr), init)
+ return walkexpr(typecheck.Expr(a), init)
case ir.ODOT, ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
@@ -699,7 +314,7 @@ func safeexpr(n ir.Node, init *ir.Nodes) ir.Node {
}
a := ir.Copy(n).(*ir.SelectorExpr)
a.X = l
- return walkexpr(typecheck(a, ctxExpr), init)
+ return walkexpr(typecheck.Expr(a), init)
case ir.ODEREF:
n := n.(*ir.StarExpr)
@@ -709,7 +324,7 @@ func safeexpr(n ir.Node, init *ir.Nodes) ir.Node {
}
a := ir.Copy(n).(*ir.StarExpr)
a.X = l
- return walkexpr(typecheck(a, ctxExpr), init)
+ return walkexpr(typecheck.Expr(a), init)
case ir.OINDEX, ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
@@ -721,7 +336,7 @@ func safeexpr(n ir.Node, init *ir.Nodes) ir.Node {
a := ir.Copy(n).(*ir.IndexExpr)
a.X = l
a.Index = r
- return walkexpr(typecheck(a, ctxExpr), init)
+ return walkexpr(typecheck.Expr(a), init)
case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
n := n.(*ir.CompLitExpr)
@@ -738,7 +353,7 @@ func safeexpr(n ir.Node, init *ir.Nodes) ir.Node {
}
func copyexpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
- l := temp(t)
+ l := typecheck.Temp(t)
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, l, n))
return l
}
@@ -754,323 +369,6 @@ func cheapexpr(n ir.Node, init *ir.Nodes) ir.Node {
return copyexpr(n, n.Type(), init)
}
-// Code to resolve elided DOTs in embedded types.
-
-// A Dlist stores a pointer to a TFIELD Type embedded within
-// a TSTRUCT or TINTER Type.
-type Dlist struct {
- field *types.Field
-}
-
-// dotlist is used by adddot1 to record the path of embedded fields
-// used to access a target field or method.
-// Must be non-nil so that dotpath returns a non-nil slice even if d is zero.
-var dotlist = make([]Dlist, 10)
-
-// lookdot0 returns the number of fields or methods named s associated
-// with Type t. If exactly one exists, it will be returned in *save
-// (if save is not nil).
-func lookdot0(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool) int {
- u := t
- if u.IsPtr() {
- u = u.Elem()
- }
-
- c := 0
- if u.IsStruct() || u.IsInterface() {
- for _, f := range u.Fields().Slice() {
- if f.Sym == s || (ignorecase && f.IsMethod() && strings.EqualFold(f.Sym.Name, s.Name)) {
- if save != nil {
- *save = f
- }
- c++
- }
- }
- }
-
- u = t
- if t.Sym() != nil && t.IsPtr() && !t.Elem().IsPtr() {
- // If t is a defined pointer type, then x.m is shorthand for (*x).m.
- u = t.Elem()
- }
- u = types.ReceiverBaseType(u)
- if u != nil {
- for _, f := range u.Methods().Slice() {
- if f.Embedded == 0 && (f.Sym == s || (ignorecase && strings.EqualFold(f.Sym.Name, s.Name))) {
- if save != nil {
- *save = f
- }
- c++
- }
- }
- }
-
- return c
-}
-
-// adddot1 returns the number of fields or methods named s at depth d in Type t.
-// If exactly one exists, it will be returned in *save (if save is not nil),
-// and dotlist will contain the path of embedded fields traversed to find it,
-// in reverse order. If none exist, more will indicate whether t contains any
-// embedded fields at depth d, so callers can decide whether to retry at
-// a greater depth.
-func adddot1(s *types.Sym, t *types.Type, d int, save **types.Field, ignorecase bool) (c int, more bool) {
- if t.Recur() {
- return
- }
- t.SetRecur(true)
- defer t.SetRecur(false)
-
- var u *types.Type
- d--
- if d < 0 {
- // We've reached our target depth. If t has any fields/methods
- // named s, then we're done. Otherwise, we still need to check
- // below for embedded fields.
- c = lookdot0(s, t, save, ignorecase)
- if c != 0 {
- return c, false
- }
- }
-
- u = t
- if u.IsPtr() {
- u = u.Elem()
- }
- if !u.IsStruct() && !u.IsInterface() {
- return c, false
- }
-
- for _, f := range u.Fields().Slice() {
- if f.Embedded == 0 || f.Sym == nil {
- continue
- }
- if d < 0 {
- // Found an embedded field at target depth.
- return c, true
- }
- a, more1 := adddot1(s, f.Type, d, save, ignorecase)
- if a != 0 && c == 0 {
- dotlist[d].field = f
- }
- c += a
- if more1 {
- more = true
- }
- }
-
- return c, more
-}
-
-// dotpath computes the unique shortest explicit selector path to fully qualify
-// a selection expression x.f, where x is of type t and f is the symbol s.
-// If no such path exists, dotpath returns nil.
-// If there are multiple shortest paths to the same depth, ambig is true.
-func dotpath(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool) (path []Dlist, ambig bool) {
- // The embedding of types within structs imposes a tree structure onto
- // types: structs parent the types they embed, and types parent their
- // fields or methods. Our goal here is to find the shortest path to
- // a field or method named s in the subtree rooted at t. To accomplish
- // that, we iteratively perform depth-first searches of increasing depth
- // until we either find the named field/method or exhaust the tree.
- for d := 0; ; d++ {
- if d > len(dotlist) {
- dotlist = append(dotlist, Dlist{})
- }
- if c, more := adddot1(s, t, d, save, ignorecase); c == 1 {
- return dotlist[:d], false
- } else if c > 1 {
- return nil, true
- } else if !more {
- return nil, false
- }
- }
-}
-
-// in T.field
-// find missing fields that
-// will give shortest unique addressing.
-// modify the tree with missing type names.
-func adddot(n *ir.SelectorExpr) *ir.SelectorExpr {
- n.X = typecheck(n.X, ctxType|ctxExpr)
- if n.X.Diag() {
- n.SetDiag(true)
- }
- t := n.X.Type()
- if t == nil {
- return n
- }
-
- if n.X.Op() == ir.OTYPE {
- return n
- }
-
- s := n.Sel
- if s == nil {
- return n
- }
-
- switch path, ambig := dotpath(s, t, nil, false); {
- case path != nil:
- // rebuild elided dots
- for c := len(path) - 1; c >= 0; c-- {
- dot := ir.NewSelectorExpr(base.Pos, ir.ODOT, n.X, path[c].field.Sym)
- dot.SetImplicit(true)
- dot.SetType(path[c].field.Type)
- n.X = dot
- }
- case ambig:
- base.Errorf("ambiguous selector %v", n)
- n.X = nil
- }
-
- return n
-}
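
dotpath searches the embedding tree by iterative deepening: it retries adddot1 at increasing depth until it finds exactly one match, detects an ambiguity, or runs out of embedded fields, and adddot then rebuilds the elided selectors it found. At the source level this is ordinary field promotion; a small illustration (not compiler code):

package main

import "fmt"

type Inner struct{ X int }

type Outer struct {
	Inner // depth-1 embedded field
}

type A struct{ X int }
type B struct{ X int }

type Clash struct {
	A // both A and B provide X at depth 1, so c.X would be ambiguous
	B
}

func main() {
	var o Outer
	o.X = 7 // shorthand the compiler rewrites to o.Inner.X (the elided dot)
	fmt.Println(o.Inner.X)

	var c Clash
	// c.X would not compile: "ambiguous selector c.X" (two paths at the same depth).
	fmt.Println(c.A.X, c.B.X) // explicit paths are still fine
}
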
-
-// Code to help generate trampoline functions for methods on embedded
-// types. These are approx the same as the corresponding adddot
-// routines except that they expect to be called with unique tasks and
-// they return the actual methods.
-
-type Symlink struct {
- field *types.Field
-}
-
-var slist []Symlink
-
-func expand0(t *types.Type) {
- u := t
- if u.IsPtr() {
- u = u.Elem()
- }
-
- if u.IsInterface() {
- for _, f := range u.Fields().Slice() {
- if f.Sym.Uniq() {
- continue
- }
- f.Sym.SetUniq(true)
- slist = append(slist, Symlink{field: f})
- }
-
- return
- }
-
- u = types.ReceiverBaseType(t)
- if u != nil {
- for _, f := range u.Methods().Slice() {
- if f.Sym.Uniq() {
- continue
- }
- f.Sym.SetUniq(true)
- slist = append(slist, Symlink{field: f})
- }
- }
-}
-
-func expand1(t *types.Type, top bool) {
- if t.Recur() {
- return
- }
- t.SetRecur(true)
-
- if !top {
- expand0(t)
- }
-
- u := t
- if u.IsPtr() {
- u = u.Elem()
- }
-
- if u.IsStruct() || u.IsInterface() {
- for _, f := range u.Fields().Slice() {
- if f.Embedded == 0 {
- continue
- }
- if f.Sym == nil {
- continue
- }
- expand1(f.Type, false)
- }
- }
-
- t.SetRecur(false)
-}
-
-func expandmeth(t *types.Type) {
- if t == nil || t.AllMethods().Len() != 0 {
- return
- }
-
- // mark top-level method symbols
- // so that expand1 doesn't consider them.
- for _, f := range t.Methods().Slice() {
- f.Sym.SetUniq(true)
- }
-
- // generate all reachable methods
- slist = slist[:0]
- expand1(t, true)
-
- // check each method to be uniquely reachable
- var ms []*types.Field
- for i, sl := range slist {
- slist[i].field = nil
- sl.field.Sym.SetUniq(false)
-
- var f *types.Field
- path, _ := dotpath(sl.field.Sym, t, &f, false)
- if path == nil {
- continue
- }
-
- // dotpath may have dug out arbitrary fields, we only want methods.
- if !f.IsMethod() {
- continue
- }
-
- // add it to the base type method list
- f = f.Copy()
- f.Embedded = 1 // needs a trampoline
- for _, d := range path {
- if d.field.Type.IsPtr() {
- f.Embedded = 2
- break
- }
- }
- ms = append(ms, f)
- }
-
- for _, f := range t.Methods().Slice() {
- f.Sym.SetUniq(false)
- }
-
- ms = append(ms, t.Methods().Slice()...)
- sort.Sort(types.MethodsByName(ms))
- t.AllMethods().Set(ms)
-}
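
expandmeth computes the set of promoted methods reachable through embedded fields, keeping only the uniquely reachable ones and marking which need a trampoline (and whether the path crosses a pointer). The source-level behavior it implements is plain method promotion, as in this standalone example:

package main

import "fmt"

type base struct{}

func (base) Hello() string { return "hello" }

type wrapper struct {
	base // Hello is promoted; the compiler emits a small forwarding method for wrapper
}

type ptrWrapper struct {
	*base // promotion through a pointer field (the "followptr" case above)
}

func main() {
	var w wrapper
	pw := ptrWrapper{base: &base{}}
	fmt.Println(w.Hello(), pw.Hello())
}
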
-
-// Given funarg struct list, return list of fn args.
-func structargs(tl *types.Type, mustname bool) []*ir.Field {
- var args []*ir.Field
- gen := 0
- for _, t := range tl.Fields().Slice() {
- s := t.Sym
- if mustname && (s == nil || s.Name == "_") {
- // invent a name so that we can refer to it in the trampoline
- s = lookupN(".anon", gen)
- gen++
- }
- a := ir.NewField(base.Pos, s, nil, t.Type)
- a.Pos = t.Pos
- a.IsDDD = t.IsDDD()
- args = append(args, a)
- }
-
- return args
-}
-
// Generate a wrapper function to convert from
// a receiver of type T to a receiver of type U.
// That is,
@@ -1110,14 +408,14 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
}
base.Pos = base.AutogeneratedPos
- dclcontext = ir.PEXTERN
+ typecheck.DeclContext = ir.PEXTERN
tfn := ir.NewFuncType(base.Pos,
- ir.NewField(base.Pos, lookup(".this"), nil, rcvr),
- structargs(method.Type.Params(), true),
- structargs(method.Type.Results(), false))
+ ir.NewField(base.Pos, typecheck.Lookup(".this"), nil, rcvr),
+ typecheck.NewFuncParams(method.Type.Params(), true),
+ typecheck.NewFuncParams(method.Type.Results(), false))
- fn := dclfunc(newnam, tfn)
+ fn := typecheck.DeclFunc(newnam, tfn)
fn.SetDupok(true)
nthis := ir.AsNode(tfn.Type().Recv().Nname)
@@ -1128,13 +426,13 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
if rcvr.IsPtr() && rcvr.Elem() == methodrcvr {
// generating wrapper from *T to T.
n := ir.NewIfStmt(base.Pos, nil, nil, nil)
- n.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, nthis, nodnil())
- call := ir.NewCallExpr(base.Pos, ir.OCALL, syslook("panicwrap"), nil)
+ n.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, nthis, typecheck.NodNil())
+ call := ir.NewCallExpr(base.Pos, ir.OCALL, typecheck.LookupRuntime("panicwrap"), nil)
n.Body = []ir.Node{call}
fn.Body.Append(n)
}
- dot := adddot(ir.NewSelectorExpr(base.Pos, ir.OXDOT, nthis, method.Sym))
+ dot := typecheck.AddImplicitDots(ir.NewSelectorExpr(base.Pos, ir.OXDOT, nthis, method.Sym))
// generate call
// It's not possible to use a tail call when dynamic linking on ppc64le. The
@@ -1147,9 +445,9 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
// generate tail call: adjust pointer receiver and jump to embedded method.
left := dot.X // skip final .M
if !left.Type().IsPtr() {
- left = nodAddr(left)
+ left = typecheck.NodAddr(left)
}
- as := ir.NewAssignStmt(base.Pos, nthis, convnop(left, rcvr))
+ as := ir.NewAssignStmt(base.Pos, nthis, typecheck.ConvNop(left, rcvr))
fn.Body.Append(as)
fn.Body.Append(ir.NewBranchStmt(base.Pos, ir.ORETJMP, ir.MethodSym(methodrcvr, method.Sym)))
} else {
@@ -1170,14 +468,14 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
ir.DumpList("genwrapper body", fn.Body)
}
- funcbody()
+ typecheck.FinishFuncBody()
if base.Debug.DclStack != 0 {
types.CheckDclstack()
}
- typecheckFunc(fn)
+ typecheck.Func(fn)
ir.CurFunc = fn
- typecheckslice(fn.Body, ctxStmt)
+ typecheck.Stmts(fn.Body)
// Inline calls within (*T).M wrappers. This is safe because we only
// generate those wrappers within the same compilation unit as (T).M.
@@ -1188,15 +486,15 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
escapeFuncs([]*ir.Func{fn}, false)
ir.CurFunc = nil
- Target.Decls = append(Target.Decls, fn)
+ typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
}
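
genwrapper emits the trampoline that adapts a method to a different receiver type: it guards a nil pointer receiver with a call into the runtime's panicwrap, rebuilds the elided selector via AddImplicitDots, and forwards (or tail-calls) the underlying method. Roughly, the generated wrapper corresponds to a hand-written forwarder like this sketch (names are hypothetical; an ordinary panic stands in for panicwrap):

package main

import "fmt"

type T struct{ n int }

func (t T) N() int { return t.n }

// wrapperPtrTN sketches what the generated (*T).N wrapper does for a
// value-receiver method: guard against a nil *T, then forward the call.
func wrapperPtrTN(this *T) int {
	if this == nil {
		panic("value method called using nil pointer receiver") // runtime.panicwrap produces the real message
	}
	return (*this).N()
}

func main() {
	t := &T{n: 3}
	fmt.Println(wrapperPtrTN(t))
}
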
func hashmem(t *types.Type) ir.Node {
sym := ir.Pkgs.Runtime.Lookup("memhash")
- n := NewName(sym)
+ n := typecheck.NewName(sym)
ir.MarkFunc(n)
- n.SetType(functype(nil, []*ir.Field{
+ n.SetType(typecheck.NewFuncType(nil, []*ir.Field{
ir.NewField(base.Pos, nil, nil, types.NewPtr(t)),
ir.NewField(base.Pos, nil, nil, types.Types[types.TUINTPTR]),
ir.NewField(base.Pos, nil, nil, types.Types[types.TUINTPTR]),
@@ -1206,112 +504,6 @@ func hashmem(t *types.Type) ir.Node {
return n
}
-func ifacelookdot(s *types.Sym, t *types.Type, ignorecase bool) (m *types.Field, followptr bool) {
- if t == nil {
- return nil, false
- }
-
- path, ambig := dotpath(s, t, &m, ignorecase)
- if path == nil {
- if ambig {
- base.Errorf("%v.%v is ambiguous", t, s)
- }
- return nil, false
- }
-
- for _, d := range path {
- if d.field.Type.IsPtr() {
- followptr = true
- break
- }
- }
-
- if !m.IsMethod() {
- base.Errorf("%v.%v is a field, not a method", t, s)
- return nil, followptr
- }
-
- return m, followptr
-}
-
-func implements(t, iface *types.Type, m, samename **types.Field, ptr *int) bool {
- t0 := t
- if t == nil {
- return false
- }
-
- if t.IsInterface() {
- i := 0
- tms := t.Fields().Slice()
- for _, im := range iface.Fields().Slice() {
- for i < len(tms) && tms[i].Sym != im.Sym {
- i++
- }
- if i == len(tms) {
- *m = im
- *samename = nil
- *ptr = 0
- return false
- }
- tm := tms[i]
- if !types.Identical(tm.Type, im.Type) {
- *m = im
- *samename = tm
- *ptr = 0
- return false
- }
- }
-
- return true
- }
-
- t = types.ReceiverBaseType(t)
- var tms []*types.Field
- if t != nil {
- expandmeth(t)
- tms = t.AllMethods().Slice()
- }
- i := 0
- for _, im := range iface.Fields().Slice() {
- if im.Broke() {
- continue
- }
- for i < len(tms) && tms[i].Sym != im.Sym {
- i++
- }
- if i == len(tms) {
- *m = im
- *samename, _ = ifacelookdot(im.Sym, t, true)
- *ptr = 0
- return false
- }
- tm := tms[i]
- if tm.Nointerface() || !types.Identical(tm.Type, im.Type) {
- *m = im
- *samename = tm
- *ptr = 0
- return false
- }
- followptr := tm.Embedded == 2
-
- // if pointer receiver in method,
- // the method does not exist for value types.
- rcvr := tm.Type.Recv().Type
- if rcvr.IsPtr() && !t0.IsPtr() && !followptr && !types.IsInterfaceMethod(tm.Type) {
- if false && base.Flag.LowerR != 0 {
- base.Errorf("interface pointer mismatch")
- }
-
- *m = im
- *samename = nil
- *ptr = 1
- return false
- }
- }
-
- return true
-}
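
implements walks the interface's methods against the concrete type's methods, both sorted by symbol, advancing a single index so a missing or mismatched method is found in one linear pass. A generic sketch of that sorted subset check (illustration only, over plain strings):

package main

import "fmt"

// containsAll reports whether every name in want also appears in have,
// assuming both slices are sorted, mirroring the single-pass walk that
// implements() performs over sorted method lists.
func containsAll(have, want []string) (missing string, ok bool) {
	i := 0
	for _, w := range want {
		for i < len(have) && have[i] < w {
			i++
		}
		if i == len(have) || have[i] != w {
			return w, false
		}
	}
	return "", true
}

func main() {
	have := []string{"Close", "Read", "Write"} // sorted by name
	if _, ok := containsAll(have, []string{"Read", "Write"}); ok {
		fmt.Println("implements")
	}
	if missing, ok := containsAll(have, []string{"Read", "Seek"}); !ok {
		fmt.Println("missing", missing) // missing Seek
	}
}
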
-
func ngotype(n ir.Node) *types.Sym {
if n.Type() != nil {
return typenamesym(n.Type())
diff --git a/src/cmd/compile/internal/gc/swt.go b/src/cmd/compile/internal/gc/swt.go
index 4e7ff00434..9ffa8b67bb 100644
--- a/src/cmd/compile/internal/gc/swt.go
+++ b/src/cmd/compile/internal/gc/swt.go
@@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"go/constant"
@@ -14,221 +15,6 @@ import (
"sort"
)
-// typecheckswitch typechecks a switch statement.
-func typecheckswitch(n *ir.SwitchStmt) {
- typecheckslice(n.Init(), ctxStmt)
- if n.Tag != nil && n.Tag.Op() == ir.OTYPESW {
- typecheckTypeSwitch(n)
- } else {
- typecheckExprSwitch(n)
- }
-}
-
-func typecheckTypeSwitch(n *ir.SwitchStmt) {
- guard := n.Tag.(*ir.TypeSwitchGuard)
- guard.X = typecheck(guard.X, ctxExpr)
- t := guard.X.Type()
- if t != nil && !t.IsInterface() {
- base.ErrorfAt(n.Pos(), "cannot type switch on non-interface value %L", guard.X)
- t = nil
- }
-
- // We don't actually declare the type switch's guarded
- // declaration itself. So if there are no cases, we won't
- // notice that it went unused.
- if v := guard.Tag; v != nil && !ir.IsBlank(v) && len(n.Cases) == 0 {
- base.ErrorfAt(v.Pos(), "%v declared but not used", v.Sym())
- }
-
- var defCase, nilCase ir.Node
- var ts typeSet
- for _, ncase := range n.Cases {
- ncase := ncase.(*ir.CaseStmt)
- ls := ncase.List
- if len(ls) == 0 { // default:
- if defCase != nil {
- base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
- } else {
- defCase = ncase
- }
- }
-
- for i := range ls {
- ls[i] = typecheck(ls[i], ctxExpr|ctxType)
- n1 := ls[i]
- if t == nil || n1.Type() == nil {
- continue
- }
-
- var missing, have *types.Field
- var ptr int
- if ir.IsNil(n1) { // case nil:
- if nilCase != nil {
- base.ErrorfAt(ncase.Pos(), "multiple nil cases in type switch (first at %v)", ir.Line(nilCase))
- } else {
- nilCase = ncase
- }
- continue
- }
- if n1.Op() != ir.OTYPE {
- base.ErrorfAt(ncase.Pos(), "%L is not a type", n1)
- continue
- }
- if !n1.Type().IsInterface() && !implements(n1.Type(), t, &missing, &have, &ptr) && !missing.Broke() {
- if have != nil && !have.Broke() {
- base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
- " (wrong type for %v method)\n\thave %v%S\n\twant %v%S", guard.X, n1.Type(), missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
- } else if ptr != 0 {
- base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
- " (%v method has pointer receiver)", guard.X, n1.Type(), missing.Sym)
- } else {
- base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
- " (missing %v method)", guard.X, n1.Type(), missing.Sym)
- }
- continue
- }
-
- ts.add(ncase.Pos(), n1.Type())
- }
-
- if len(ncase.Vars) != 0 {
- // Assign the clause variable's type.
- vt := t
- if len(ls) == 1 {
- if ls[0].Op() == ir.OTYPE {
- vt = ls[0].Type()
- } else if !ir.IsNil(ls[0]) {
- // Invalid single-type case;
- // mark variable as broken.
- vt = nil
- }
- }
-
- nvar := ncase.Vars[0]
- nvar.SetType(vt)
- if vt != nil {
- nvar = typecheck(nvar, ctxExpr|ctxAssign)
- } else {
- // Clause variable is broken; prevent typechecking.
- nvar.SetTypecheck(1)
- nvar.SetWalkdef(1)
- }
- ncase.Vars[0] = nvar
- }
-
- typecheckslice(ncase.Body, ctxStmt)
- }
-}
-
-type typeSet struct {
- m map[string][]typeSetEntry
-}
-
-type typeSetEntry struct {
- pos src.XPos
- typ *types.Type
-}
-
-func (s *typeSet) add(pos src.XPos, typ *types.Type) {
- if s.m == nil {
- s.m = make(map[string][]typeSetEntry)
- }
-
- // LongString does not uniquely identify types, so we need to
- // disambiguate collisions with types.Identical.
- // TODO(mdempsky): Add a method that *is* unique.
- ls := typ.LongString()
- prevs := s.m[ls]
- for _, prev := range prevs {
- if types.Identical(typ, prev.typ) {
- base.ErrorfAt(pos, "duplicate case %v in type switch\n\tprevious case at %s", typ, base.FmtPos(prev.pos))
- return
- }
- }
- s.m[ls] = append(prevs, typeSetEntry{pos, typ})
-}
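
typeSet deduplicates type-switch cases with a map keyed by a cheap but non-unique string (LongString) and settles collisions with types.Identical. A generic sketch of the same "cheap key, exact check" pattern, with strings standing in for *types.Type:

package main

import "fmt"

type entry struct{ key, payload string }

// set deduplicates values using a possibly colliding string key,
// falling back to an exact check within each bucket.
type set struct{ m map[string][]entry }

func (s *set) add(key, payload string) bool {
	if s.m == nil {
		s.m = make(map[string][]entry)
	}
	for _, prev := range s.m[key] {
		if prev.payload == payload { // exact check; the key alone is not trusted
			return false // duplicate
		}
	}
	s.m[key] = append(s.m[key], entry{key, payload})
	return true
}

func main() {
	var s set
	fmt.Println(s.add("map[K]V", "map[pkg1.K]pkg1.V")) // true
	fmt.Println(s.add("map[K]V", "map[pkg2.K]pkg2.V")) // true: same key, different type
	fmt.Println(s.add("map[K]V", "map[pkg1.K]pkg1.V")) // false: real duplicate
}
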
-
-func typecheckExprSwitch(n *ir.SwitchStmt) {
- t := types.Types[types.TBOOL]
- if n.Tag != nil {
- n.Tag = typecheck(n.Tag, ctxExpr)
- n.Tag = defaultlit(n.Tag, nil)
- t = n.Tag.Type()
- }
-
- var nilonly string
- if t != nil {
- switch {
- case t.IsMap():
- nilonly = "map"
- case t.Kind() == types.TFUNC:
- nilonly = "func"
- case t.IsSlice():
- nilonly = "slice"
-
- case !types.IsComparable(t):
- if t.IsStruct() {
- base.ErrorfAt(n.Pos(), "cannot switch on %L (struct containing %v cannot be compared)", n.Tag, types.IncomparableField(t).Type)
- } else {
- base.ErrorfAt(n.Pos(), "cannot switch on %L", n.Tag)
- }
- t = nil
- }
- }
-
- var defCase ir.Node
- var cs constSet
- for _, ncase := range n.Cases {
- ncase := ncase.(*ir.CaseStmt)
- ls := ncase.List
- if len(ls) == 0 { // default:
- if defCase != nil {
- base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
- } else {
- defCase = ncase
- }
- }
-
- for i := range ls {
- ir.SetPos(ncase)
- ls[i] = typecheck(ls[i], ctxExpr)
- ls[i] = defaultlit(ls[i], t)
- n1 := ls[i]
- if t == nil || n1.Type() == nil {
- continue
- }
-
- if nilonly != "" && !ir.IsNil(n1) {
- base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Tag)
- } else if t.IsInterface() && !n1.Type().IsInterface() && !types.IsComparable(n1.Type()) {
- base.ErrorfAt(ncase.Pos(), "invalid case %L in switch (incomparable type)", n1)
- } else {
- op1, _ := assignop(n1.Type(), t)
- op2, _ := assignop(t, n1.Type())
- if op1 == ir.OXXX && op2 == ir.OXXX {
- if n.Tag != nil {
- base.ErrorfAt(ncase.Pos(), "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Tag, n1.Type(), t)
- } else {
- base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type())
- }
- }
- }
-
- // Don't check for duplicate bools. Although the spec allows it,
- // (1) the compiler hasn't checked it in the past, so compatibility mandates it, and
- // (2) it would disallow useful things like
- // case GOARCH == "arm" && GOARM == "5":
- // case GOARCH == "arm":
- // which would both evaluate to false for non-ARM compiles.
- if !n1.Type().IsBoolean() {
- cs.add(ncase.Pos(), n1, "case", "switch")
- }
- }
-
- typecheckslice(ncase.Body, ctxStmt)
- }
-}
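
The nilonly path enforces the language rule that map, func and slice values compare only to nil, so a switch on such a tag may only have nil cases. For example:

package main

import "fmt"

func main() {
	var m map[string]int
	// A map tag is only comparable to nil, so nil is the only legal case value;
	// a case with any other map expression would be rejected by the compiler.
	switch m {
	case nil:
		fmt.Println("m is nil")
	}
}
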
-
// walkswitch walks a switch statement.
func walkswitch(sw *ir.SwitchStmt) {
// Guard against double walk, see #25776.
@@ -254,8 +40,8 @@ func walkExprSwitch(sw *ir.SwitchStmt) {
// convert switch {...} to switch true {...}
if cond == nil {
cond = ir.NewBool(true)
- cond = typecheck(cond, ctxExpr)
- cond = defaultlit(cond, nil)
+ cond = typecheck.Expr(cond)
+ cond = typecheck.DefaultLit(cond, nil)
}
// Given "switch string(byteslice)",
@@ -285,7 +71,7 @@ func walkExprSwitch(sw *ir.SwitchStmt) {
var body ir.Nodes
for _, ncase := range sw.Cases {
ncase := ncase.(*ir.CaseStmt)
- label := autolabel(".s")
+ label := typecheck.AutoLabel(".s")
jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
// Process case dispatch.
@@ -509,7 +295,7 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
s.facename = walkexpr(s.facename, sw.PtrInit())
s.facename = copyexpr(s.facename, s.facename.Type(), &sw.Compiled)
- s.okname = temp(types.Types[types.TBOOL])
+ s.okname = typecheck.Temp(types.Types[types.TBOOL])
// Get interface descriptor word.
// For empty interfaces this will be the type.
@@ -523,10 +309,10 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
// h := e._type.hash
// Use a similar strategy for non-empty interfaces.
ifNil := ir.NewIfStmt(base.Pos, nil, nil, nil)
- ifNil.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, itab, nodnil())
+ ifNil.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, itab, typecheck.NodNil())
base.Pos = base.Pos.WithNotStmt() // disable statement marks after the first check.
- ifNil.Cond = typecheck(ifNil.Cond, ctxExpr)
- ifNil.Cond = defaultlit(ifNil.Cond, nil)
+ ifNil.Cond = typecheck.Expr(ifNil.Cond)
+ ifNil.Cond = typecheck.DefaultLit(ifNil.Cond, nil)
// ifNil.Nbody assigned at end.
sw.Compiled.Append(ifNil)
@@ -561,7 +347,7 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
}
caseVarInitialized := false
- label := autolabel(".s")
+ label := typecheck.AutoLabel(".s")
jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
if len(ncase.List) == 0 { // default:
@@ -602,7 +388,7 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
ir.NewDecl(ncase.Pos(), ir.ODCL, caseVar),
ir.NewAssignStmt(ncase.Pos(), caseVar, val),
}
- typecheckslice(l, ctxStmt)
+ typecheck.Stmts(l)
body.Append(l...)
}
body.Append(ncase.Body...)
@@ -648,7 +434,7 @@ func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp ir.Node) {
ir.NewDecl(pos, ir.ODCL, caseVar),
ir.NewAssignStmt(pos, caseVar, nil),
}
- typecheckslice(l, ctxStmt)
+ typecheck.Stmts(l)
body.Append(l...)
} else {
caseVar = ir.BlankNode
@@ -740,8 +526,8 @@ func binarySearch(n int, out *ir.Nodes, less func(i int) ir.Node, leaf func(i in
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
leaf(i, nif)
base.Pos = base.Pos.WithNotStmt()
- nif.Cond = typecheck(nif.Cond, ctxExpr)
- nif.Cond = defaultlit(nif.Cond, nil)
+ nif.Cond = typecheck.Expr(nif.Cond)
+ nif.Cond = typecheck.DefaultLit(nif.Cond, nil)
out.Append(nif)
out = &nif.Else
}
@@ -752,8 +538,8 @@ func binarySearch(n int, out *ir.Nodes, less func(i int) ir.Node, leaf func(i in
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
nif.Cond = less(half)
base.Pos = base.Pos.WithNotStmt()
- nif.Cond = typecheck(nif.Cond, ctxExpr)
- nif.Cond = defaultlit(nif.Cond, nil)
+ nif.Cond = typecheck.Expr(nif.Cond)
+ nif.Cond = typecheck.DefaultLit(nif.Cond, nil)
do(lo, half, &nif.Body)
do(half, hi, &nif.Else)
out.Append(nif)
diff --git a/src/cmd/compile/internal/gc/typecheck.go b/src/cmd/compile/internal/gc/typecheck.go
deleted file mode 100644
index 0552dd180f..0000000000
--- a/src/cmd/compile/internal/gc/typecheck.go
+++ /dev/null
@@ -1,4147 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package gc
-
-import (
- "cmd/compile/internal/base"
- "cmd/compile/internal/ir"
- "cmd/compile/internal/types"
- "fmt"
- "go/constant"
- "go/token"
- "strings"
-)
-
-var (
- NeedFuncSym = func(*types.Sym) {}
- NeedITab = func(t, itype *types.Type) {}
- NeedRuntimeType = func(*types.Type) {}
-)
-
-func TypecheckInit() {
- initUniverse()
- dclcontext = ir.PEXTERN
- base.Timer.Start("fe", "loadsys")
- loadsys()
-}
-
-func TypecheckPackage() {
- finishUniverse()
-
- typecheckok = true
-
- // Process top-level declarations in phases.
-
- // Phase 1: const, type, and names and types of funcs.
- // This will gather all the information about types
- // and methods but doesn't depend on any of it.
- //
- // We also defer type alias declarations until phase 2
- // to avoid cycles like #18640.
- // TODO(gri) Remove this again once we have a fix for #25838.
-
- // Don't use range--typecheck can add closures to Target.Decls.
- base.Timer.Start("fe", "typecheck", "top1")
- for i := 0; i < len(Target.Decls); i++ {
- n := Target.Decls[i]
- if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.(*ir.Decl).X.Name().Alias()) {
- Target.Decls[i] = typecheck(n, ctxStmt)
- }
- }
-
- // Phase 2: Variable assignments.
- // To check interface assignments, depends on phase 1.
-
- // Don't use range--typecheck can add closures to Target.Decls.
- base.Timer.Start("fe", "typecheck", "top2")
- for i := 0; i < len(Target.Decls); i++ {
- n := Target.Decls[i]
- if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).X.Name().Alias() {
- Target.Decls[i] = typecheck(n, ctxStmt)
- }
- }
-
- // Phase 3: Type check function bodies.
- // Don't use range--typecheck can add closures to Target.Decls.
- base.Timer.Start("fe", "typecheck", "func")
- var fcount int64
- for i := 0; i < len(Target.Decls); i++ {
- n := Target.Decls[i]
- if n.Op() == ir.ODCLFUNC {
- TypecheckFuncBody(n.(*ir.Func))
- fcount++
- }
- }
-
- // Phase 4: Check external declarations.
- // TODO(mdempsky): This should be handled when type checking their
- // corresponding ODCL nodes.
- base.Timer.Start("fe", "typecheck", "externdcls")
- for i, n := range Target.Externs {
- if n.Op() == ir.ONAME {
- Target.Externs[i] = typecheck(Target.Externs[i], ctxExpr)
- }
- }
-
- // Phase 5: With all user code type-checked, it's now safe to verify map keys.
- checkMapKeys()
-
- // Phase 6: Decide how to capture closed variables.
- // This needs to run before escape analysis,
- // because variables captured by value do not escape.
- base.Timer.Start("fe", "capturevars")
- for _, n := range Target.Decls {
- if n.Op() == ir.ODCLFUNC {
- n := n.(*ir.Func)
- if n.OClosure != nil {
- ir.CurFunc = n
- capturevars(n)
- }
- }
- }
- capturevarscomplete = true
- ir.CurFunc = nil
-
- if base.Debug.TypecheckInl != 0 {
- // Typecheck imported function bodies if Debug.l > 1,
- // otherwise lazily when used or re-exported.
- TypecheckImports()
- }
-}
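
Each phase iterates Target.Decls by index rather than with range because typechecking may append newly discovered closures to the slice while it is being walked. A minimal sketch of why the index loop matters:

package main

import "fmt"

func main() {
	// Work items may enqueue more work; an index loop sees appended items,
	// whereas a range loop captures the slice length up front and would not.
	work := []int{1, 2}
	for i := 0; i < len(work); i++ {
		if work[i] == 2 {
			work = append(work, 3) // e.g. a closure discovered during typechecking
		}
		fmt.Println("processed", work[i])
	}
}
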
-
-func TypecheckAssignExpr(n ir.Node) ir.Node { return typecheck(n, ctxExpr|ctxAssign) }
-func TypecheckExpr(n ir.Node) ir.Node { return typecheck(n, ctxExpr) }
-func TypecheckStmt(n ir.Node) ir.Node { return typecheck(n, ctxStmt) }
-
-func TypecheckExprs(exprs []ir.Node) { typecheckslice(exprs, ctxExpr) }
-func TypecheckStmts(stmts []ir.Node) { typecheckslice(stmts, ctxStmt) }
-
-func TypecheckCall(call *ir.CallExpr) {
- t := call.X.Type()
- if t == nil {
- panic("misuse of Call")
- }
- ctx := ctxStmt
- if t.NumResults() > 0 {
- ctx = ctxExpr | ctxMultiOK
- }
- if typecheck(call, ctx) != call {
- panic("bad typecheck")
- }
-}
-
-func TypecheckCallee(n ir.Node) ir.Node {
- return typecheck(n, ctxExpr|ctxCallee)
-}
-
-func TypecheckFuncBody(n *ir.Func) {
- ir.CurFunc = n
- decldepth = 1
- errorsBefore := base.Errors()
- typecheckslice(n.Body, ctxStmt)
- checkreturn(n)
- if base.Errors() > errorsBefore {
- n.Body.Set(nil) // type errors; do not compile
- }
- // Now that we've checked whether n terminates,
- // we can eliminate some obviously dead code.
- deadcode(n)
-}
-
-var importlist []*ir.Func
-
-func TypecheckImports() {
- for _, n := range importlist {
- if n.Inl != nil {
- typecheckinl(n)
- }
- }
-}
-
-var traceIndent []byte
-
-func tracePrint(title string, n ir.Node) func(np *ir.Node) {
- indent := traceIndent
-
- // guard against nil
- var pos, op string
- var tc uint8
- if n != nil {
- pos = base.FmtPos(n.Pos())
- op = n.Op().String()
- tc = n.Typecheck()
- }
-
- types.SkipSizeForTracing = true
- defer func() { types.SkipSizeForTracing = false }()
- fmt.Printf("%s: %s%s %p %s %v tc=%d\n", pos, indent, title, n, op, n, tc)
- traceIndent = append(traceIndent, ". "...)
-
- return func(np *ir.Node) {
- traceIndent = traceIndent[:len(traceIndent)-2]
-
- // if we have a result, use that
- if np != nil {
- n = *np
- }
-
- // guard against nil
- // use outer pos, op so we don't get empty pos/op if n == nil (nicer output)
- var tc uint8
- var typ *types.Type
- if n != nil {
- pos = base.FmtPos(n.Pos())
- op = n.Op().String()
- tc = n.Typecheck()
- typ = n.Type()
- }
-
- types.SkipSizeForTracing = true
- defer func() { types.SkipSizeForTracing = false }()
- fmt.Printf("%s: %s=> %p %s %v tc=%d type=%L\n", pos, indent, n, op, n, tc, typ)
- }
-}
-
-const (
- ctxStmt = 1 << iota // evaluated at statement level
- ctxExpr // evaluated in value context
- ctxType // evaluated in type context
- ctxCallee // call-only expressions are ok
- ctxMultiOK // multivalue function returns are ok
- ctxAssign // assigning to expression
-)
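
The ctx* constants form a bitmask passed as top, and combinations are tested with masks such as top&(ctxExpr|ctxType) == ctxType (a type is required, a value is not acceptable). A small sketch of the same flag idiom (the describe helper and its messages are made up):

package main

import "fmt"

const (
	ctxStmt = 1 << iota
	ctxExpr
	ctxType
	ctxCallee
	ctxMultiOK
	ctxAssign
)

func describe(top int) string {
	switch {
	case top&ctxCallee != 0:
		return "call-only expressions are allowed"
	case top&(ctxExpr|ctxType) == ctxType:
		return "a type is required here"
	case top&(ctxStmt|ctxExpr) == ctxExpr:
		return "a single value is required here"
	default:
		return "statement context"
	}
}

func main() {
	fmt.Println(describe(ctxType))
	fmt.Println(describe(ctxExpr | ctxAssign))
	fmt.Println(describe(ctxExpr | ctxCallee))
}
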
-
-// type checks the whole tree of an expression.
-// calculates expression types.
-// evaluates compile time constants.
-// marks variables that escape the local frame.
-// rewrites n.Op to be more specific in some cases.
-
-var typecheckdefstack []ir.Node
-
-// resolve ONONAME to definition, if any.
-func resolve(n ir.Node) (res ir.Node) {
- if n == nil || n.Op() != ir.ONONAME {
- return n
- }
-
- // only trace if there's work to do
- if base.EnableTrace && base.Flag.LowerT {
- defer tracePrint("resolve", n)(&res)
- }
-
- if sym := n.Sym(); sym.Pkg != types.LocalPkg {
- // We might have an ir.Ident from oldname or importDot.
- if id, ok := n.(*ir.Ident); ok {
- if pkgName := dotImportRefs[id]; pkgName != nil {
- pkgName.Used = true
- }
- }
-
- if inimport {
- base.Fatalf("recursive inimport")
- }
- inimport = true
- n = expandDecl(n)
- inimport = false
- return n
- }
-
- r := ir.AsNode(n.Sym().Def)
- if r == nil {
- return n
- }
-
- if r.Op() == ir.OIOTA {
- if x := getIotaValue(); x >= 0 {
- return ir.NewInt(x)
- }
- return n
- }
-
- return r
-}
-
-func typecheckslice(l []ir.Node, top int) {
- for i := range l {
- l[i] = typecheck(l[i], top)
- }
-}
-
-var _typekind = []string{
- types.TINT: "int",
- types.TUINT: "uint",
- types.TINT8: "int8",
- types.TUINT8: "uint8",
- types.TINT16: "int16",
- types.TUINT16: "uint16",
- types.TINT32: "int32",
- types.TUINT32: "uint32",
- types.TINT64: "int64",
- types.TUINT64: "uint64",
- types.TUINTPTR: "uintptr",
- types.TCOMPLEX64: "complex64",
- types.TCOMPLEX128: "complex128",
- types.TFLOAT32: "float32",
- types.TFLOAT64: "float64",
- types.TBOOL: "bool",
- types.TSTRING: "string",
- types.TPTR: "pointer",
- types.TUNSAFEPTR: "unsafe.Pointer",
- types.TSTRUCT: "struct",
- types.TINTER: "interface",
- types.TCHAN: "chan",
- types.TMAP: "map",
- types.TARRAY: "array",
- types.TSLICE: "slice",
- types.TFUNC: "func",
- types.TNIL: "nil",
- types.TIDEAL: "untyped number",
-}
-
-func typekind(t *types.Type) string {
- if t.IsUntyped() {
- return fmt.Sprintf("%v", t)
- }
- et := t.Kind()
- if int(et) < len(_typekind) {
- s := _typekind[et]
- if s != "" {
- return s
- }
- }
- return fmt.Sprintf("etype=%d", et)
-}
-
-func cycleFor(start ir.Node) []ir.Node {
- // Find the start node in typecheck_tcstack.
- // We know that it must exist because each time we mark
- // a node with n.SetTypecheck(2) we push it on the stack,
-	// a node with n.SetTypecheck(2) we push it on the stack,
-	// and each time we mark a node with n.SetTypecheck(1) we
-	// pop it from the stack. We hit a cycle when we encounter
- i := len(typecheck_tcstack) - 1
- for i > 0 && typecheck_tcstack[i] != start {
- i--
- }
-
- // collect all nodes with same Op
- var cycle []ir.Node
- for _, n := range typecheck_tcstack[i:] {
- if n.Op() == start.Op() {
- cycle = append(cycle, n)
- }
- }
-
- return cycle
-}
-
-func cycleTrace(cycle []ir.Node) string {
- var s string
- for i, n := range cycle {
- s += fmt.Sprintf("\n\t%v: %v uses %v", ir.Line(n), n, cycle[(i+1)%len(cycle)])
- }
- return s
-}
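
The Typecheck marks act as the usual white/grey/black colouring for cycle detection: 2 while a node is in progress (and on typecheck_tcstack), 1 once finished, so meeting a 2-marked node identifies a cycle and the stack suffix from that node names it. A generic sketch of that pattern over a small dependency graph (findCycle is an illustration, not compiler code):

package main

import "fmt"

// findCycle marks a node 2 while it is being processed (keeping it on a stack)
// and 1 when done; reaching a 2-marked node means the stack suffix starting at
// that node is the cycle — the same shape as typecheck_tcstack.
func findCycle(deps map[string][]string, n string, state map[string]int, stack *[]string) []string {
	switch state[n] {
	case 1:
		return nil
	case 2:
		for i, s := range *stack {
			if s == n {
				return append([]string(nil), (*stack)[i:]...)
			}
		}
	}
	state[n] = 2
	*stack = append(*stack, n)
	for _, d := range deps[n] {
		if c := findCycle(deps, d, state, stack); c != nil {
			return c
		}
	}
	*stack = (*stack)[:len(*stack)-1]
	state[n] = 1
	return nil
}

func main() {
	deps := map[string][]string{"A": {"B"}, "B": {"C"}, "C": {"A"}}
	fmt.Println(findCycle(deps, "A", map[string]int{}, new([]string))) // [A B C]
}
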
-
-var typecheck_tcstack []ir.Node
-
-func typecheckFunc(fn *ir.Func) {
- new := typecheck(fn, ctxStmt)
- if new != fn {
- base.Fatalf("typecheck changed func")
- }
-}
-
-func typecheckNtype(n ir.Ntype) ir.Ntype {
- return typecheck(n, ctxType).(ir.Ntype)
-}
-
-// typecheck type checks node n.
-// The result of typecheck MUST be assigned back to n, e.g.
-// n.Left = typecheck(n.Left, top)
-func typecheck(n ir.Node, top int) (res ir.Node) {
- // cannot type check until all the source has been parsed
- if !typecheckok {
- base.Fatalf("early typecheck")
- }
-
- if n == nil {
- return nil
- }
-
- // only trace if there's work to do
- if base.EnableTrace && base.Flag.LowerT {
- defer tracePrint("typecheck", n)(&res)
- }
-
- lno := ir.SetPos(n)
-
- // Skip over parens.
- for n.Op() == ir.OPAREN {
- n = n.(*ir.ParenExpr).X
- }
-
- // Resolve definition of name and value of iota lazily.
- n = resolve(n)
-
- // Skip typecheck if already done.
- // But re-typecheck ONAME/OTYPE/OLITERAL/OPACK node in case context has changed.
- if n.Typecheck() == 1 {
- switch n.Op() {
- case ir.ONAME, ir.OTYPE, ir.OLITERAL, ir.OPACK:
- break
-
- default:
- base.Pos = lno
- return n
- }
- }
-
- if n.Typecheck() == 2 {
-	// Typechecking loop. Try printing a meaningful message,
- // otherwise a stack trace of typechecking.
- switch n.Op() {
- // We can already diagnose variables used as types.
- case ir.ONAME:
- n := n.(*ir.Name)
- if top&(ctxExpr|ctxType) == ctxType {
- base.Errorf("%v is not a type", n)
- }
-
- case ir.OTYPE:
- // Only report a type cycle if we are expecting a type.
- // Otherwise let other code report an error.
- if top&ctxType == ctxType {
- // A cycle containing only alias types is an error
- // since it would expand indefinitely when aliases
- // are substituted.
- cycle := cycleFor(n)
- for _, n1 := range cycle {
- if n1.Name() != nil && !n1.Name().Alias() {
- // Cycle is ok. But if n is an alias type and doesn't
- // have a type yet, we have a recursive type declaration
- // with aliases that we can't handle properly yet.
- // Report an error rather than crashing later.
- if n.Name() != nil && n.Name().Alias() && n.Type() == nil {
- base.Pos = n.Pos()
- base.Fatalf("cannot handle alias type declaration (issue #25838): %v", n)
- }
- base.Pos = lno
- return n
- }
- }
- base.ErrorfAt(n.Pos(), "invalid recursive type alias %v%s", n, cycleTrace(cycle))
- }
-
- case ir.OLITERAL:
- if top&(ctxExpr|ctxType) == ctxType {
- base.Errorf("%v is not a type", n)
- break
- }
- base.ErrorfAt(n.Pos(), "constant definition loop%s", cycleTrace(cycleFor(n)))
- }
-
- if base.Errors() == 0 {
- var trace string
- for i := len(typecheck_tcstack) - 1; i >= 0; i-- {
- x := typecheck_tcstack[i]
- trace += fmt.Sprintf("\n\t%v %v", ir.Line(x), x)
- }
- base.Errorf("typechecking loop involving %v%s", n, trace)
- }
-
- base.Pos = lno
- return n
- }
-
- typecheck_tcstack = append(typecheck_tcstack, n)
-
- n.SetTypecheck(2)
- n = typecheck1(n, top)
- n.SetTypecheck(1)
-
- last := len(typecheck_tcstack) - 1
- typecheck_tcstack[last] = nil
- typecheck_tcstack = typecheck_tcstack[:last]
-
- _, isExpr := n.(ir.Expr)
- _, isStmt := n.(ir.Stmt)
- isMulti := false
- switch n.Op() {
- case ir.OCALLFUNC, ir.OCALLINTER, ir.OCALLMETH:
- n := n.(*ir.CallExpr)
- if t := n.X.Type(); t != nil && t.Kind() == types.TFUNC {
- nr := t.NumResults()
- isMulti = nr > 1
- if nr == 0 {
- isExpr = false
- }
- }
- case ir.OAPPEND:
- // Must be used (and not BinaryExpr/UnaryExpr).
- isStmt = false
- case ir.OCLOSE, ir.ODELETE, ir.OPANIC, ir.OPRINT, ir.OPRINTN, ir.OVARKILL, ir.OVARLIVE:
- // Must not be used.
- isExpr = false
- isStmt = true
- case ir.OCOPY, ir.ORECOVER, ir.ORECV:
- // Can be used or not.
- isStmt = true
- }
-
- t := n.Type()
- if t != nil && !t.IsFuncArgStruct() && n.Op() != ir.OTYPE {
- switch t.Kind() {
- case types.TFUNC, // might have TANY; wait until it's called
- types.TANY, types.TFORW, types.TIDEAL, types.TNIL, types.TBLANK:
- break
-
- default:
- types.CheckSize(t)
- }
- }
- if t != nil {
- n = evalConst(n)
- t = n.Type()
- }
-
- // TODO(rsc): Lots of the complexity here is because typecheck can
- // see OTYPE, ONAME, and OLITERAL nodes multiple times.
- // Once we make the IR a proper tree, we should be able to simplify
- // this code a bit, especially the final case.
- switch {
- case top&(ctxStmt|ctxExpr) == ctxExpr && !isExpr && n.Op() != ir.OTYPE && !isMulti:
- if !n.Diag() {
- base.Errorf("%v used as value", n)
- n.SetDiag(true)
- }
- if t != nil {
- n.SetType(nil)
- }
-
- case top&ctxType == 0 && n.Op() == ir.OTYPE && t != nil:
- if !n.Type().Broke() {
- base.Errorf("type %v is not an expression", n.Type())
- }
- n.SetType(nil)
-
- case top&(ctxStmt|ctxExpr) == ctxStmt && !isStmt && t != nil:
- if !n.Diag() {
- base.Errorf("%v evaluated but not used", n)
- n.SetDiag(true)
- }
- n.SetType(nil)
-
- case top&(ctxType|ctxExpr) == ctxType && n.Op() != ir.OTYPE && n.Op() != ir.ONONAME && (t != nil || n.Op() == ir.ONAME):
- base.Errorf("%v is not a type", n)
- if t != nil {
- n.SetType(nil)
- }
-
- }
-
- base.Pos = lno
- return n
-}
-
-// indexlit implements typechecking of untyped values as
-// array/slice indexes. It is almost equivalent to defaultlit
-// but also accepts untyped numeric values representable as
-// value of type int (see also checkmake for comparison).
-// The result of indexlit MUST be assigned back to n, e.g.
-// n.Left = indexlit(n.Left)
-func indexlit(n ir.Node) ir.Node {
- if n != nil && n.Type() != nil && n.Type().Kind() == types.TIDEAL {
- return defaultlit(n, types.Types[types.TINT])
- }
- return n
-}
-
-// typecheck1 should ONLY be called from typecheck.
-func typecheck1(n ir.Node, top int) (res ir.Node) {
- if base.EnableTrace && base.Flag.LowerT {
- defer tracePrint("typecheck1", n)(&res)
- }
-
- switch n.Op() {
- case ir.OLITERAL, ir.ONAME, ir.ONONAME, ir.OTYPE:
- if n.Sym() == nil {
- return n
- }
-
- if n.Op() == ir.ONAME {
- n := n.(*ir.Name)
- if n.BuiltinOp != 0 && top&ctxCallee == 0 {
- base.Errorf("use of builtin %v not in function call", n.Sym())
- n.SetType(nil)
- return n
- }
- }
-
- typecheckdef(n)
- if n.Op() == ir.ONONAME {
- n.SetType(nil)
- return n
- }
- }
-
- switch n.Op() {
- default:
- ir.Dump("typecheck", n)
- base.Fatalf("typecheck %v", n.Op())
- panic("unreachable")
-
- // names
- case ir.OLITERAL:
- if n.Type() == nil && n.Val().Kind() == constant.String {
- base.Fatalf("string literal missing type")
- }
- return n
-
- case ir.ONIL, ir.ONONAME:
- return n
-
- case ir.ONAME:
- n := n.(*ir.Name)
- if n.Name().Decldepth == 0 {
- n.Name().Decldepth = decldepth
- }
- if n.BuiltinOp != 0 {
- return n
- }
- if top&ctxAssign == 0 {
- // not a write to the variable
- if ir.IsBlank(n) {
- base.Errorf("cannot use _ as value")
- n.SetType(nil)
- return n
- }
- n.Name().SetUsed(true)
- }
- return n
-
- case ir.ONAMEOFFSET:
- // type already set
- return n
-
- case ir.OPACK:
- n := n.(*ir.PkgName)
- base.Errorf("use of package %v without selector", n.Sym())
- n.SetType(nil)
- return n
-
- // types (ODEREF is with exprs)
- case ir.OTYPE:
- if n.Type() == nil {
- return n
- }
- return n
-
- case ir.OTSLICE:
- n := n.(*ir.SliceType)
- n.Elem = typecheck(n.Elem, ctxType)
- if n.Elem.Type() == nil {
- return n
- }
- t := types.NewSlice(n.Elem.Type())
- n.SetOTYPE(t)
- types.CheckSize(t)
- return n
-
- case ir.OTARRAY:
- n := n.(*ir.ArrayType)
- n.Elem = typecheck(n.Elem, ctxType)
- if n.Elem.Type() == nil {
- return n
- }
- if n.Len == nil { // [...]T
- if !n.Diag() {
- n.SetDiag(true)
- base.Errorf("use of [...] array outside of array literal")
- }
- return n
- }
- n.Len = indexlit(typecheck(n.Len, ctxExpr))
- size := n.Len
- if ir.ConstType(size) != constant.Int {
- switch {
- case size.Type() == nil:
- // Error already reported elsewhere.
- case size.Type().IsInteger() && size.Op() != ir.OLITERAL:
- base.Errorf("non-constant array bound %v", size)
- default:
- base.Errorf("invalid array bound %v", size)
- }
- return n
- }
-
- v := size.Val()
- if ir.ConstOverflow(v, types.Types[types.TINT]) {
- base.Errorf("array bound is too large")
- return n
- }
-
- if constant.Sign(v) < 0 {
- base.Errorf("array bound must be non-negative")
- return n
- }
-
- bound, _ := constant.Int64Val(v)
- t := types.NewArray(n.Elem.Type(), bound)
- n.SetOTYPE(t)
- types.CheckSize(t)
- return n
-
- case ir.OTMAP:
- n := n.(*ir.MapType)
- n.Key = typecheck(n.Key, ctxType)
- n.Elem = typecheck(n.Elem, ctxType)
- l := n.Key
- r := n.Elem
- if l.Type() == nil || r.Type() == nil {
- return n
- }
- if l.Type().NotInHeap() {
- base.Errorf("incomplete (or unallocatable) map key not allowed")
- }
- if r.Type().NotInHeap() {
- base.Errorf("incomplete (or unallocatable) map value not allowed")
- }
- n.SetOTYPE(types.NewMap(l.Type(), r.Type()))
- mapqueue = append(mapqueue, n) // check map keys when all types are settled
- return n
-
- case ir.OTCHAN:
- n := n.(*ir.ChanType)
- n.Elem = typecheck(n.Elem, ctxType)
- l := n.Elem
- if l.Type() == nil {
- return n
- }
- if l.Type().NotInHeap() {
- base.Errorf("chan of incomplete (or unallocatable) type not allowed")
- }
- n.SetOTYPE(types.NewChan(l.Type(), n.Dir))
- return n
-
- case ir.OTSTRUCT:
- n := n.(*ir.StructType)
- n.SetOTYPE(tostruct(n.Fields))
- return n
-
- case ir.OTINTER:
- n := n.(*ir.InterfaceType)
- n.SetOTYPE(tointerface(n.Methods))
- return n
-
- case ir.OTFUNC:
- n := n.(*ir.FuncType)
- n.SetOTYPE(functype(n.Recv, n.Params, n.Results))
- return n
-
- // type or expr
- case ir.ODEREF:
- n := n.(*ir.StarExpr)
- n.X = typecheck(n.X, ctxExpr|ctxType)
- l := n.X
- t := l.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
- if l.Op() == ir.OTYPE {
- n.SetOTYPE(types.NewPtr(l.Type()))
- // Ensure l.Type gets dowidth'd for the backend. Issue 20174.
- types.CheckSize(l.Type())
- return n
- }
-
- if !t.IsPtr() {
- if top&(ctxExpr|ctxStmt) != 0 {
- base.Errorf("invalid indirect of %L", n.X)
- n.SetType(nil)
- return n
- }
- base.Errorf("%v is not a type", l)
- return n
- }
-
- n.SetType(t.Elem())
- return n
-
- // arithmetic exprs
- case ir.OASOP,
- ir.OADD,
- ir.OAND,
- ir.OANDAND,
- ir.OANDNOT,
- ir.ODIV,
- ir.OEQ,
- ir.OGE,
- ir.OGT,
- ir.OLE,
- ir.OLT,
- ir.OLSH,
- ir.ORSH,
- ir.OMOD,
- ir.OMUL,
- ir.ONE,
- ir.OOR,
- ir.OOROR,
- ir.OSUB,
- ir.OXOR:
- var l, r ir.Node
- var setLR func()
- switch n := n.(type) {
- case *ir.AssignOpStmt:
- l, r = n.X, n.Y
- setLR = func() { n.X = l; n.Y = r }
- case *ir.BinaryExpr:
- l, r = n.X, n.Y
- setLR = func() { n.X = l; n.Y = r }
- case *ir.LogicalExpr:
- l, r = n.X, n.Y
- setLR = func() { n.X = l; n.Y = r }
- }
- l = typecheck(l, ctxExpr)
- r = typecheck(r, ctxExpr)
- setLR()
- if l.Type() == nil || r.Type() == nil {
- n.SetType(nil)
- return n
- }
- op := n.Op()
- if n.Op() == ir.OASOP {
- n := n.(*ir.AssignOpStmt)
- checkassign(n, l)
- if n.IncDec && !okforarith[l.Type().Kind()] {
- base.Errorf("invalid operation: %v (non-numeric type %v)", n, l.Type())
- n.SetType(nil)
- return n
- }
- // TODO(marvin): Fix Node.EType type union.
- op = n.AsOp
- }
- if op == ir.OLSH || op == ir.ORSH {
- r = defaultlit(r, types.Types[types.TUINT])
- setLR()
- t := r.Type()
- if !t.IsInteger() {
- base.Errorf("invalid operation: %v (shift count type %v, must be integer)", n, r.Type())
- n.SetType(nil)
- return n
- }
- if t.IsSigned() && !types.AllowsGoVersion(curpkg(), 1, 13) {
- base.ErrorfVers("go1.13", "invalid operation: %v (signed shift count type %v)", n, r.Type())
- n.SetType(nil)
- return n
- }
- t = l.Type()
- if t != nil && t.Kind() != types.TIDEAL && !t.IsInteger() {
- base.Errorf("invalid operation: %v (shift of type %v)", n, t)
- n.SetType(nil)
- return n
- }
-
- // no defaultlit for left
- // the outer context gives the type
- n.SetType(l.Type())
- if (l.Type() == types.UntypedFloat || l.Type() == types.UntypedComplex) && r.Op() == ir.OLITERAL {
- n.SetType(types.UntypedInt)
- }
- return n
- }
-
- // For "x == x && len(s)", it's better to report that "len(s)" (type int)
- // can't be used with "&&" than to report that "x == x" (type untyped bool)
- // can't be converted to int (see issue #41500).
- if n.Op() == ir.OANDAND || n.Op() == ir.OOROR {
- n := n.(*ir.LogicalExpr)
- if !n.X.Type().IsBoolean() {
- base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(n.X.Type()))
- n.SetType(nil)
- return n
- }
- if !n.Y.Type().IsBoolean() {
- base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(n.Y.Type()))
- n.SetType(nil)
- return n
- }
- }
-
- // ideal mixed with non-ideal
- l, r = defaultlit2(l, r, false)
- setLR()
-
- if l.Type() == nil || r.Type() == nil {
- n.SetType(nil)
- return n
- }
- t := l.Type()
- if t.Kind() == types.TIDEAL {
- t = r.Type()
- }
- et := t.Kind()
- if et == types.TIDEAL {
- et = types.TINT
- }
- aop := ir.OXXX
- if iscmp[n.Op()] && t.Kind() != types.TIDEAL && !types.Identical(l.Type(), r.Type()) {
- // comparison is okay as long as one side is
- // assignable to the other. convert so they have
- // the same type.
- //
- // the only conversion that isn't a no-op is concrete == interface.
- // in that case, check comparability of the concrete type.
- // The conversion allocates, so only do it if the concrete type is huge.
- converted := false
- if r.Type().Kind() != types.TBLANK {
- aop, _ = assignop(l.Type(), r.Type())
- if aop != ir.OXXX {
- if r.Type().IsInterface() && !l.Type().IsInterface() && !types.IsComparable(l.Type()) {
- base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(l.Type()))
- n.SetType(nil)
- return n
- }
-
- types.CalcSize(l.Type())
- if r.Type().IsInterface() == l.Type().IsInterface() || l.Type().Width >= 1<<16 {
- l = ir.NewConvExpr(base.Pos, aop, r.Type(), l)
- l.SetTypecheck(1)
- setLR()
- }
-
- t = r.Type()
- converted = true
- }
- }
-
- if !converted && l.Type().Kind() != types.TBLANK {
- aop, _ = assignop(r.Type(), l.Type())
- if aop != ir.OXXX {
- if l.Type().IsInterface() && !r.Type().IsInterface() && !types.IsComparable(r.Type()) {
- base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(r.Type()))
- n.SetType(nil)
- return n
- }
-
- types.CalcSize(r.Type())
- if r.Type().IsInterface() == l.Type().IsInterface() || r.Type().Width >= 1<<16 {
- r = ir.NewConvExpr(base.Pos, aop, l.Type(), r)
- r.SetTypecheck(1)
- setLR()
- }
-
- t = l.Type()
- }
- }
-
- et = t.Kind()
- }
-
- if t.Kind() != types.TIDEAL && !types.Identical(l.Type(), r.Type()) {
- l, r = defaultlit2(l, r, true)
- if l.Type() == nil || r.Type() == nil {
- n.SetType(nil)
- return n
- }
- if l.Type().IsInterface() == r.Type().IsInterface() || aop == 0 {
- base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, l.Type(), r.Type())
- n.SetType(nil)
- return n
- }
- }
-
- if t.Kind() == types.TIDEAL {
- t = mixUntyped(l.Type(), r.Type())
- }
- if dt := defaultType(t); !okfor[op][dt.Kind()] {
- base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(t))
- n.SetType(nil)
- return n
- }
-
- // okfor allows any array == array, map == map, func == func.
- // restrict to slice/map/func == nil and nil == slice/map/func.
- if l.Type().IsArray() && !types.IsComparable(l.Type()) {
- base.Errorf("invalid operation: %v (%v cannot be compared)", n, l.Type())
- n.SetType(nil)
- return n
- }
-
- if l.Type().IsSlice() && !ir.IsNil(l) && !ir.IsNil(r) {
- base.Errorf("invalid operation: %v (slice can only be compared to nil)", n)
- n.SetType(nil)
- return n
- }
-
- if l.Type().IsMap() && !ir.IsNil(l) && !ir.IsNil(r) {
- base.Errorf("invalid operation: %v (map can only be compared to nil)", n)
- n.SetType(nil)
- return n
- }
-
- if l.Type().Kind() == types.TFUNC && !ir.IsNil(l) && !ir.IsNil(r) {
- base.Errorf("invalid operation: %v (func can only be compared to nil)", n)
- n.SetType(nil)
- return n
- }
-
- if l.Type().IsStruct() {
- if f := types.IncomparableField(l.Type()); f != nil {
- base.Errorf("invalid operation: %v (struct containing %v cannot be compared)", n, f.Type)
- n.SetType(nil)
- return n
- }
- }
-
- if iscmp[n.Op()] {
- t = types.UntypedBool
- n.SetType(t)
- if con := evalConst(n); con.Op() == ir.OLITERAL {
- return con
- }
- l, r = defaultlit2(l, r, true)
- setLR()
- return n
- }
-
- if et == types.TSTRING && n.Op() == ir.OADD {
- // create or update OADDSTR node with list of strings in x + y + z + (w + v) + ...
- n := n.(*ir.BinaryExpr)
- var add *ir.AddStringExpr
- if l.Op() == ir.OADDSTR {
- add = l.(*ir.AddStringExpr)
- add.SetPos(n.Pos())
- } else {
- add = ir.NewAddStringExpr(n.Pos(), []ir.Node{l})
- }
- if r.Op() == ir.OADDSTR {
- r := r.(*ir.AddStringExpr)
- add.List.Append(r.List.Take()...)
- } else {
- add.List.Append(r)
- }
- add.SetType(t)
- return add
- }
-
- if (op == ir.ODIV || op == ir.OMOD) && ir.IsConst(r, constant.Int) {
- if constant.Sign(r.Val()) == 0 {
- base.Errorf("division by zero")
- n.SetType(nil)
- return n
- }
- }
-
- n.SetType(t)
- return n
-
- case ir.OBITNOT, ir.ONEG, ir.ONOT, ir.OPLUS:
- n := n.(*ir.UnaryExpr)
- n.X = typecheck(n.X, ctxExpr)
- l := n.X
- t := l.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
- if !okfor[n.Op()][defaultType(t).Kind()] {
- base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op(), typekind(t))
- n.SetType(nil)
- return n
- }
-
- n.SetType(t)
- return n
-
- // exprs
- case ir.OADDR:
- n := n.(*ir.AddrExpr)
- n.X = typecheck(n.X, ctxExpr)
- if n.X.Type() == nil {
- n.SetType(nil)
- return n
- }
-
- switch n.X.Op() {
- case ir.OARRAYLIT, ir.OMAPLIT, ir.OSLICELIT, ir.OSTRUCTLIT:
- n.SetOp(ir.OPTRLIT)
-
- default:
- checklvalue(n.X, "take the address of")
- r := ir.OuterValue(n.X)
- if r.Op() == ir.ONAME {
- r := r.(*ir.Name)
- if ir.Orig(r) != r {
- base.Fatalf("found non-orig name node %v", r) // TODO(mdempsky): What does this mean?
- }
- r.Name().SetAddrtaken(true)
- if r.Name().IsClosureVar() && !capturevarscomplete {
- // Mark the original variable as Addrtaken so that capturevars
- // knows not to pass it by value.
- // But if the capturevars phase is complete, don't touch it,
- // in case l.Name's containing function has not yet been compiled.
- r.Name().Defn.Name().SetAddrtaken(true)
- }
- }
- n.X = defaultlit(n.X, nil)
- if n.X.Type() == nil {
- n.SetType(nil)
- return n
- }
- }
-
- n.SetType(types.NewPtr(n.X.Type()))
- return n
-
- case ir.OCOMPLIT:
- return typecheckcomplit(n.(*ir.CompLitExpr))
-
- case ir.OXDOT, ir.ODOT:
- n := n.(*ir.SelectorExpr)
- if n.Op() == ir.OXDOT {
- n = adddot(n)
- n.SetOp(ir.ODOT)
- if n.X == nil {
- n.SetType(nil)
- return n
- }
- }
-
- n.X = typecheck(n.X, ctxExpr|ctxType)
-
- n.X = defaultlit(n.X, nil)
-
- t := n.X.Type()
- if t == nil {
- base.UpdateErrorDot(ir.Line(n), fmt.Sprint(n.X), fmt.Sprint(n))
- n.SetType(nil)
- return n
- }
-
- s := n.Sel
-
- if n.X.Op() == ir.OTYPE {
- return typecheckMethodExpr(n)
- }
-
- if t.IsPtr() && !t.Elem().IsInterface() {
- t = t.Elem()
- if t == nil {
- n.SetType(nil)
- return n
- }
- n.SetOp(ir.ODOTPTR)
- types.CheckSize(t)
- }
-
- if n.Sel.IsBlank() {
- base.Errorf("cannot refer to blank field or method")
- n.SetType(nil)
- return n
- }
-
- if lookdot(n, t, 0) == nil {
- // Legitimate field or method lookup failed, try to explain the error
- switch {
- case t.IsEmptyInterface():
- base.Errorf("%v undefined (type %v is interface with no methods)", n, n.X.Type())
-
- case t.IsPtr() && t.Elem().IsInterface():
- // Pointer to interface is almost always a mistake.
- base.Errorf("%v undefined (type %v is pointer to interface, not interface)", n, n.X.Type())
-
- case lookdot(n, t, 1) != nil:
- // Field or method matches by name, but it is not exported.
- base.Errorf("%v undefined (cannot refer to unexported field or method %v)", n, n.Sel)
-
- default:
- if mt := lookdot(n, t, 2); mt != nil && visible(mt.Sym) { // Case-insensitive lookup.
- base.Errorf("%v undefined (type %v has no field or method %v, but does have %v)", n, n.X.Type(), n.Sel, mt.Sym)
- } else {
- base.Errorf("%v undefined (type %v has no field or method %v)", n, n.X.Type(), n.Sel)
- }
- }
- n.SetType(nil)
- return n
- }
-
- if (n.Op() == ir.ODOTINTER || n.Op() == ir.ODOTMETH) && top&ctxCallee == 0 {
- return typecheckpartialcall(n, s)
- }
- return n
-
- case ir.ODOTTYPE:
- n := n.(*ir.TypeAssertExpr)
- n.X = typecheck(n.X, ctxExpr)
- n.X = defaultlit(n.X, nil)
- l := n.X
- t := l.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
- if !t.IsInterface() {
- base.Errorf("invalid type assertion: %v (non-interface type %v on left)", n, t)
- n.SetType(nil)
- return n
- }
-
- if n.Ntype != nil {
- n.Ntype = typecheck(n.Ntype, ctxType)
- n.SetType(n.Ntype.Type())
- n.Ntype = nil
- if n.Type() == nil {
- return n
- }
- }
-
- if n.Type() != nil && !n.Type().IsInterface() {
- var missing, have *types.Field
- var ptr int
- if !implements(n.Type(), t, &missing, &have, &ptr) {
- if have != nil && have.Sym == missing.Sym {
- base.Errorf("impossible type assertion:\n\t%v does not implement %v (wrong type for %v method)\n"+
- "\t\thave %v%S\n\t\twant %v%S", n.Type(), t, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
- } else if ptr != 0 {
- base.Errorf("impossible type assertion:\n\t%v does not implement %v (%v method has pointer receiver)", n.Type(), t, missing.Sym)
- } else if have != nil {
- base.Errorf("impossible type assertion:\n\t%v does not implement %v (missing %v method)\n"+
- "\t\thave %v%S\n\t\twant %v%S", n.Type(), t, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
- } else {
- base.Errorf("impossible type assertion:\n\t%v does not implement %v (missing %v method)", n.Type(), t, missing.Sym)
- }
- n.SetType(nil)
- return n
- }
- }
- return n
-
- case ir.OINDEX:
- n := n.(*ir.IndexExpr)
- n.X = typecheck(n.X, ctxExpr)
- n.X = defaultlit(n.X, nil)
- n.X = implicitstar(n.X)
- l := n.X
- n.Index = typecheck(n.Index, ctxExpr)
- r := n.Index
- t := l.Type()
- if t == nil || r.Type() == nil {
- n.SetType(nil)
- return n
- }
- switch t.Kind() {
- default:
- base.Errorf("invalid operation: %v (type %v does not support indexing)", n, t)
- n.SetType(nil)
- return n
-
- case types.TSTRING, types.TARRAY, types.TSLICE:
- n.Index = indexlit(n.Index)
- if t.IsString() {
- n.SetType(types.ByteType)
- } else {
- n.SetType(t.Elem())
- }
- why := "string"
- if t.IsArray() {
- why = "array"
- } else if t.IsSlice() {
- why = "slice"
- }
-
- if n.Index.Type() != nil && !n.Index.Type().IsInteger() {
- base.Errorf("non-integer %s index %v", why, n.Index)
- return n
- }
-
- if !n.Bounded() && ir.IsConst(n.Index, constant.Int) {
- x := n.Index.Val()
- if constant.Sign(x) < 0 {
- base.Errorf("invalid %s index %v (index must be non-negative)", why, n.Index)
- } else if t.IsArray() && constant.Compare(x, token.GEQ, constant.MakeInt64(t.NumElem())) {
- base.Errorf("invalid array index %v (out of bounds for %d-element array)", n.Index, t.NumElem())
- } else if ir.IsConst(n.X, constant.String) && constant.Compare(x, token.GEQ, constant.MakeInt64(int64(len(ir.StringVal(n.X))))) {
- base.Errorf("invalid string index %v (out of bounds for %d-byte string)", n.Index, len(ir.StringVal(n.X)))
- } else if ir.ConstOverflow(x, types.Types[types.TINT]) {
- base.Errorf("invalid %s index %v (index too large)", why, n.Index)
- }
- }
-
- case types.TMAP:
- n.Index = assignconv(n.Index, t.Key(), "map index")
- n.SetType(t.Elem())
- n.SetOp(ir.OINDEXMAP)
- n.Assigned = false
- }
- return n
-
- case ir.ORECV:
- n := n.(*ir.UnaryExpr)
- n.X = typecheck(n.X, ctxExpr)
- n.X = defaultlit(n.X, nil)
- l := n.X
- t := l.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
- if !t.IsChan() {
- base.Errorf("invalid operation: %v (receive from non-chan type %v)", n, t)
- n.SetType(nil)
- return n
- }
-
- if !t.ChanDir().CanRecv() {
- base.Errorf("invalid operation: %v (receive from send-only type %v)", n, t)
- n.SetType(nil)
- return n
- }
-
- n.SetType(t.Elem())
- return n
-
- case ir.OSEND:
- n := n.(*ir.SendStmt)
- n.Chan = typecheck(n.Chan, ctxExpr)
- n.Value = typecheck(n.Value, ctxExpr)
- n.Chan = defaultlit(n.Chan, nil)
- t := n.Chan.Type()
- if t == nil {
- return n
- }
- if !t.IsChan() {
- base.Errorf("invalid operation: %v (send to non-chan type %v)", n, t)
- return n
- }
-
- if !t.ChanDir().CanSend() {
- base.Errorf("invalid operation: %v (send to receive-only type %v)", n, t)
- return n
- }
-
- n.Value = assignconv(n.Value, t.Elem(), "send")
- if n.Value.Type() == nil {
- return n
- }
- return n
-
- case ir.OSLICEHEADER:
- // Errors here are Fatalf instead of Errorf because only the compiler
- // can construct an OSLICEHEADER node.
- // Components used in OSLICEHEADER that are supplied by parsed source code
- // have already been typechecked in e.g. OMAKESLICE earlier.
- n := n.(*ir.SliceHeaderExpr)
- t := n.Type()
- if t == nil {
- base.Fatalf("no type specified for OSLICEHEADER")
- }
-
- if !t.IsSlice() {
- base.Fatalf("invalid type %v for OSLICEHEADER", n.Type())
- }
-
- if n.Ptr == nil || n.Ptr.Type() == nil || !n.Ptr.Type().IsUnsafePtr() {
- base.Fatalf("need unsafe.Pointer for OSLICEHEADER")
- }
-
- if x := len(n.LenCap); x != 2 {
- base.Fatalf("expected 2 params (len, cap) for OSLICEHEADER, got %d", x)
- }
-
- n.Ptr = typecheck(n.Ptr, ctxExpr)
- l := typecheck(n.LenCap[0], ctxExpr)
- c := typecheck(n.LenCap[1], ctxExpr)
- l = defaultlit(l, types.Types[types.TINT])
- c = defaultlit(c, types.Types[types.TINT])
-
- if ir.IsConst(l, constant.Int) && ir.Int64Val(l) < 0 {
- base.Fatalf("len for OSLICEHEADER must be non-negative")
- }
-
- if ir.IsConst(c, constant.Int) && ir.Int64Val(c) < 0 {
- base.Fatalf("cap for OSLICEHEADER must be non-negative")
- }
-
- if ir.IsConst(l, constant.Int) && ir.IsConst(c, constant.Int) && constant.Compare(l.Val(), token.GTR, c.Val()) {
- base.Fatalf("len larger than cap for OSLICEHEADER")
- }
-
- n.LenCap[0] = l
- n.LenCap[1] = c
- return n
-
- case ir.OMAKESLICECOPY:
- // Errors here are Fatalf instead of Errorf because only the compiler
- // can construct an OMAKESLICECOPY node.
- // Components used in OMAKESLICECOPY that are supplied by parsed source code
- // have already been typechecked in OMAKE and OCOPY earlier.
- n := n.(*ir.MakeExpr)
- t := n.Type()
-
- if t == nil {
- base.Fatalf("no type specified for OMAKESLICECOPY")
- }
-
- if !t.IsSlice() {
- base.Fatalf("invalid type %v for OMAKESLICECOPY", n.Type())
- }
-
- if n.Len == nil {
- base.Fatalf("missing len argument for OMAKESLICECOPY")
- }
-
- if n.Cap == nil {
- base.Fatalf("missing slice argument to copy for OMAKESLICECOPY")
- }
-
- n.Len = typecheck(n.Len, ctxExpr)
- n.Cap = typecheck(n.Cap, ctxExpr)
-
- n.Len = defaultlit(n.Len, types.Types[types.TINT])
-
- if !n.Len.Type().IsInteger() && n.Type().Kind() != types.TIDEAL {
- base.Errorf("non-integer len argument in OMAKESLICECOPY")
- }
-
- if ir.IsConst(n.Len, constant.Int) {
- if ir.ConstOverflow(n.Len.Val(), types.Types[types.TINT]) {
- base.Fatalf("len for OMAKESLICECOPY too large")
- }
- if constant.Sign(n.Len.Val()) < 0 {
- base.Fatalf("len for OMAKESLICECOPY must be non-negative")
- }
- }
- return n
-
- case ir.OSLICE, ir.OSLICE3:
- n := n.(*ir.SliceExpr)
- n.X = typecheck(n.X, ctxExpr)
- low, high, max := n.SliceBounds()
- hasmax := n.Op().IsSlice3()
- low = typecheck(low, ctxExpr)
- high = typecheck(high, ctxExpr)
- max = typecheck(max, ctxExpr)
- n.X = defaultlit(n.X, nil)
- low = indexlit(low)
- high = indexlit(high)
- max = indexlit(max)
- n.SetSliceBounds(low, high, max)
- l := n.X
- if l.Type() == nil {
- n.SetType(nil)
- return n
- }
- if l.Type().IsArray() {
- if !ir.IsAssignable(n.X) {
- base.Errorf("invalid operation %v (slice of unaddressable value)", n)
- n.SetType(nil)
- return n
- }
-
- addr := nodAddr(n.X)
- addr.SetImplicit(true)
- n.X = typecheck(addr, ctxExpr)
- l = n.X
- }
- t := l.Type()
- var tp *types.Type
- if t.IsString() {
- if hasmax {
- base.Errorf("invalid operation %v (3-index slice of string)", n)
- n.SetType(nil)
- return n
- }
- n.SetType(t)
- n.SetOp(ir.OSLICESTR)
- } else if t.IsPtr() && t.Elem().IsArray() {
- tp = t.Elem()
- n.SetType(types.NewSlice(tp.Elem()))
- types.CalcSize(n.Type())
- if hasmax {
- n.SetOp(ir.OSLICE3ARR)
- } else {
- n.SetOp(ir.OSLICEARR)
- }
- } else if t.IsSlice() {
- n.SetType(t)
- } else {
- base.Errorf("cannot slice %v (type %v)", l, t)
- n.SetType(nil)
- return n
- }
-
- if low != nil && !checksliceindex(l, low, tp) {
- n.SetType(nil)
- return n
- }
- if high != nil && !checksliceindex(l, high, tp) {
- n.SetType(nil)
- return n
- }
- if max != nil && !checksliceindex(l, max, tp) {
- n.SetType(nil)
- return n
- }
- if !checksliceconst(low, high) || !checksliceconst(low, max) || !checksliceconst(high, max) {
- n.SetType(nil)
- return n
- }
- return n
-
- // call and call like
- case ir.OCALL:
- n := n.(*ir.CallExpr)
- n.Use = ir.CallUseExpr
- if top == ctxStmt {
- n.Use = ir.CallUseStmt
- }
- typecheckslice(n.Init(), ctxStmt) // imported rewritten f(g()) calls (#30907)
- n.X = typecheck(n.X, ctxExpr|ctxType|ctxCallee)
- if n.X.Diag() {
- n.SetDiag(true)
- }
-
- l := n.X
-
- if l.Op() == ir.ONAME && l.(*ir.Name).BuiltinOp != 0 {
- l := l.(*ir.Name)
- if n.IsDDD && l.BuiltinOp != ir.OAPPEND {
- base.Errorf("invalid use of ... with builtin %v", l)
- }
-
- // builtin: OLEN, OCAP, etc.
- switch l.BuiltinOp {
- default:
- base.Fatalf("unknown builtin %v", l)
-
- case ir.OAPPEND, ir.ODELETE, ir.OMAKE, ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
- n.SetOp(l.BuiltinOp)
- n.X = nil
- n.SetTypecheck(0) // re-typechecking new op is OK, not a loop
- return typecheck(n, top)
-
- case ir.OCAP, ir.OCLOSE, ir.OIMAG, ir.OLEN, ir.OPANIC, ir.OREAL:
- typecheckargs(n)
- fallthrough
- case ir.ONEW, ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
- arg, ok := needOneArg(n, "%v", n.Op())
- if !ok {
- n.SetType(nil)
- return n
- }
- u := ir.NewUnaryExpr(n.Pos(), l.BuiltinOp, arg)
- return typecheck(ir.InitExpr(n.Init(), u), top) // typecheckargs can add to old.Init
-
- case ir.OCOMPLEX, ir.OCOPY:
- typecheckargs(n)
- arg1, arg2, ok := needTwoArgs(n)
- if !ok {
- n.SetType(nil)
- return n
- }
- b := ir.NewBinaryExpr(n.Pos(), l.BuiltinOp, arg1, arg2)
- return typecheck(ir.InitExpr(n.Init(), b), top) // typecheckargs can add to old.Init
- }
- panic("unreachable")
- }
-
- n.X = defaultlit(n.X, nil)
- l = n.X
- if l.Op() == ir.OTYPE {
- if n.IsDDD {
- if !l.Type().Broke() {
- base.Errorf("invalid use of ... in type conversion to %v", l.Type())
- }
- n.SetDiag(true)
- }
-
- // pick off before type-checking arguments
- arg, ok := needOneArg(n, "conversion to %v", l.Type())
- if !ok {
- n.SetType(nil)
- return n
- }
-
- n := ir.NewConvExpr(n.Pos(), ir.OCONV, nil, arg)
- n.SetType(l.Type())
- return typecheck1(n, top)
- }
-
- typecheckargs(n)
- t := l.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
- types.CheckSize(t)
-
- switch l.Op() {
- case ir.ODOTINTER:
- n.SetOp(ir.OCALLINTER)
-
- case ir.ODOTMETH:
- l := l.(*ir.SelectorExpr)
- n.SetOp(ir.OCALLMETH)
-
- // typecheckaste was used here but there wasn't enough
- // information further down the call chain to know if we
- // were testing a method receiver for unexported fields.
- // It isn't necessary, so just do a sanity check.
- tp := t.Recv().Type
-
- if l.X == nil || !types.Identical(l.X.Type(), tp) {
- base.Fatalf("method receiver")
- }
-
- default:
- n.SetOp(ir.OCALLFUNC)
- if t.Kind() != types.TFUNC {
- // TODO(mdempsky): Remove "o.Sym() != nil" once we stop
- // using ir.Name for numeric literals.
- if o := ir.Orig(l); o.Name() != nil && o.Sym() != nil && types.BuiltinPkg.Lookup(o.Sym().Name).Def != nil {
- // be more specific when the non-function
- // name matches a predeclared function
- base.Errorf("cannot call non-function %L, declared at %s",
- l, base.FmtPos(o.Name().Pos()))
- } else {
- base.Errorf("cannot call non-function %L", l)
- }
- n.SetType(nil)
- return n
- }
- }
-
- typecheckaste(ir.OCALL, n.X, n.IsDDD, t.Params(), n.Args, func() string { return fmt.Sprintf("argument to %v", n.X) })
- if t.NumResults() == 0 {
- return n
- }
- if t.NumResults() == 1 {
- n.SetType(l.Type().Results().Field(0).Type)
-
- if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME {
- if sym := n.X.(*ir.Name).Sym(); types.IsRuntimePkg(sym.Pkg) && sym.Name == "getg" {
- // Emit code for runtime.getg() directly instead of calling function.
- // Most such rewrites (for example the similar one for math.Sqrt) should be done in walk,
- // so that the ordering pass can make sure to preserve the semantics of the original code
- // (in particular, the exact time of the function call) by introducing temporaries.
- // In this case, we know getg() always returns the same result within a given function
- // and we want to avoid the temporaries, so we do the rewrite earlier than is typical.
- n.SetOp(ir.OGETG)
- }
- }
- return n
- }
-
- // multiple return
- if top&(ctxMultiOK|ctxStmt) == 0 {
- base.Errorf("multiple-value %v() in single-value context", l)
- return n
- }
-
- n.SetType(l.Type().Results())
- return n
-
- case ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
- n := n.(*ir.UnaryExpr)
- n.SetType(types.Types[types.TUINTPTR])
- return n
-
- case ir.OCAP, ir.OLEN:
- n := n.(*ir.UnaryExpr)
- n.X = typecheck(n.X, ctxExpr)
- n.X = defaultlit(n.X, nil)
- n.X = implicitstar(n.X)
- l := n.X
- t := l.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
-
- var ok bool
- if n.Op() == ir.OLEN {
- ok = okforlen[t.Kind()]
- } else {
- ok = okforcap[t.Kind()]
- }
- if !ok {
- base.Errorf("invalid argument %L for %v", l, n.Op())
- n.SetType(nil)
- return n
- }
-
- n.SetType(types.Types[types.TINT])
- return n
-
- case ir.OREAL, ir.OIMAG:
- n := n.(*ir.UnaryExpr)
- n.X = typecheck(n.X, ctxExpr)
- l := n.X
- t := l.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
-
- // Determine result type.
- switch t.Kind() {
- case types.TIDEAL:
- n.SetType(types.UntypedFloat)
- case types.TCOMPLEX64:
- n.SetType(types.Types[types.TFLOAT32])
- case types.TCOMPLEX128:
- n.SetType(types.Types[types.TFLOAT64])
- default:
- base.Errorf("invalid argument %L for %v", l, n.Op())
- n.SetType(nil)
- return n
- }
- return n
-
- case ir.OCOMPLEX:
- n := n.(*ir.BinaryExpr)
- l := typecheck(n.X, ctxExpr)
- r := typecheck(n.Y, ctxExpr)
- if l.Type() == nil || r.Type() == nil {
- n.SetType(nil)
- return n
- }
- l, r = defaultlit2(l, r, false)
- if l.Type() == nil || r.Type() == nil {
- n.SetType(nil)
- return n
- }
- n.X = l
- n.Y = r
-
- if !types.Identical(l.Type(), r.Type()) {
- base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, l.Type(), r.Type())
- n.SetType(nil)
- return n
- }
-
- var t *types.Type
- switch l.Type().Kind() {
- default:
- base.Errorf("invalid operation: %v (arguments have type %v, expected floating-point)", n, l.Type())
- n.SetType(nil)
- return n
-
- case types.TIDEAL:
- t = types.UntypedComplex
-
- case types.TFLOAT32:
- t = types.Types[types.TCOMPLEX64]
-
- case types.TFLOAT64:
- t = types.Types[types.TCOMPLEX128]
- }
- n.SetType(t)
- return n
-
- case ir.OCLOSE:
- n := n.(*ir.UnaryExpr)
- n.X = typecheck(n.X, ctxExpr)
- n.X = defaultlit(n.X, nil)
- l := n.X
- t := l.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
- if !t.IsChan() {
- base.Errorf("invalid operation: %v (non-chan type %v)", n, t)
- n.SetType(nil)
- return n
- }
-
- if !t.ChanDir().CanSend() {
- base.Errorf("invalid operation: %v (cannot close receive-only channel)", n)
- n.SetType(nil)
- return n
- }
- return n
-
- case ir.ODELETE:
- n := n.(*ir.CallExpr)
- typecheckargs(n)
- args := n.Args
- if len(args) == 0 {
- base.Errorf("missing arguments to delete")
- n.SetType(nil)
- return n
- }
-
- if len(args) == 1 {
- base.Errorf("missing second (key) argument to delete")
- n.SetType(nil)
- return n
- }
-
- if len(args) != 2 {
- base.Errorf("too many arguments to delete")
- n.SetType(nil)
- return n
- }
-
- l := args[0]
- r := args[1]
- if l.Type() != nil && !l.Type().IsMap() {
- base.Errorf("first argument to delete must be map; have %L", l.Type())
- n.SetType(nil)
- return n
- }
-
- args[1] = assignconv(r, l.Type().Key(), "delete")
- return n
-
- case ir.OAPPEND:
- n := n.(*ir.CallExpr)
- typecheckargs(n)
- args := n.Args
- if len(args) == 0 {
- base.Errorf("missing arguments to append")
- n.SetType(nil)
- return n
- }
-
- t := args[0].Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
-
- n.SetType(t)
- if !t.IsSlice() {
- if ir.IsNil(args[0]) {
- base.Errorf("first argument to append must be typed slice; have untyped nil")
- n.SetType(nil)
- return n
- }
-
- base.Errorf("first argument to append must be slice; have %L", t)
- n.SetType(nil)
- return n
- }
-
- if n.IsDDD {
- if len(args) == 1 {
- base.Errorf("cannot use ... on first argument to append")
- n.SetType(nil)
- return n
- }
-
- if len(args) != 2 {
- base.Errorf("too many arguments to append")
- n.SetType(nil)
- return n
- }
-
- if t.Elem().IsKind(types.TUINT8) && args[1].Type().IsString() {
- args[1] = defaultlit(args[1], types.Types[types.TSTRING])
- return n
- }
-
- args[1] = assignconv(args[1], t.Underlying(), "append")
- return n
- }
-
- as := args[1:]
- for i, n := range as {
- if n.Type() == nil {
- continue
- }
- as[i] = assignconv(n, t.Elem(), "append")
- types.CheckSize(as[i].Type()) // ensure width is calculated for backend
- }
- return n
-
- case ir.OCOPY:
- n := n.(*ir.BinaryExpr)
- n.SetType(types.Types[types.TINT])
- n.X = typecheck(n.X, ctxExpr)
- n.X = defaultlit(n.X, nil)
- n.Y = typecheck(n.Y, ctxExpr)
- n.Y = defaultlit(n.Y, nil)
- if n.X.Type() == nil || n.Y.Type() == nil {
- n.SetType(nil)
- return n
- }
-
- // copy([]byte, string)
- if n.X.Type().IsSlice() && n.Y.Type().IsString() {
- if types.Identical(n.X.Type().Elem(), types.ByteType) {
- return n
- }
- base.Errorf("arguments to copy have different element types: %L and string", n.X.Type())
- n.SetType(nil)
- return n
- }
-
- if !n.X.Type().IsSlice() || !n.Y.Type().IsSlice() {
- if !n.X.Type().IsSlice() && !n.Y.Type().IsSlice() {
- base.Errorf("arguments to copy must be slices; have %L, %L", n.X.Type(), n.Y.Type())
- } else if !n.X.Type().IsSlice() {
- base.Errorf("first argument to copy should be slice; have %L", n.X.Type())
- } else {
- base.Errorf("second argument to copy should be slice or string; have %L", n.Y.Type())
- }
- n.SetType(nil)
- return n
- }
-
- if !types.Identical(n.X.Type().Elem(), n.Y.Type().Elem()) {
- base.Errorf("arguments to copy have different element types: %L and %L", n.X.Type(), n.Y.Type())
- n.SetType(nil)
- return n
- }
- return n
-
- case ir.OCONV:
- n := n.(*ir.ConvExpr)
- types.CheckSize(n.Type()) // ensure width is calculated for backend
- n.X = typecheck(n.X, ctxExpr)
- n.X = convlit1(n.X, n.Type(), true, nil)
- t := n.X.Type()
- if t == nil || n.Type() == nil {
- n.SetType(nil)
- return n
- }
- op, why := convertop(n.X.Op() == ir.OLITERAL, t, n.Type())
- if op == ir.OXXX {
- if !n.Diag() && !n.Type().Broke() && !n.X.Diag() {
- base.Errorf("cannot convert %L to type %v%s", n.X, n.Type(), why)
- n.SetDiag(true)
- }
- n.SetOp(ir.OCONV)
- n.SetType(nil)
- return n
- }
-
- n.SetOp(op)
- switch n.Op() {
- case ir.OCONVNOP:
- if t.Kind() == n.Type().Kind() {
- switch t.Kind() {
- case types.TFLOAT32, types.TFLOAT64, types.TCOMPLEX64, types.TCOMPLEX128:
- // Floating point casts imply rounding and
- // so the conversion must be kept.
- n.SetOp(ir.OCONV)
- }
- }
-
- // do not convert to []byte literal. See CL 125796.
- // generated code and compiler memory footprint are better without it.
- case ir.OSTR2BYTES:
- // ok
-
- case ir.OSTR2RUNES:
- if n.X.Op() == ir.OLITERAL {
- return stringtoruneslit(n)
- }
- }
- return n
-
- case ir.OMAKE:
- n := n.(*ir.CallExpr)
- args := n.Args
- if len(args) == 0 {
- base.Errorf("missing argument to make")
- n.SetType(nil)
- return n
- }
-
- n.Args.Set(nil)
- l := args[0]
- l = typecheck(l, ctxType)
- t := l.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
-
- i := 1
- var nn ir.Node
- switch t.Kind() {
- default:
- base.Errorf("cannot make type %v", t)
- n.SetType(nil)
- return n
-
- case types.TSLICE:
- if i >= len(args) {
- base.Errorf("missing len argument to make(%v)", t)
- n.SetType(nil)
- return n
- }
-
- l = args[i]
- i++
- l = typecheck(l, ctxExpr)
- var r ir.Node
- if i < len(args) {
- r = args[i]
- i++
- r = typecheck(r, ctxExpr)
- }
-
- if l.Type() == nil || (r != nil && r.Type() == nil) {
- n.SetType(nil)
- return n
- }
- if !checkmake(t, "len", &l) || r != nil && !checkmake(t, "cap", &r) {
- n.SetType(nil)
- return n
- }
- if ir.IsConst(l, constant.Int) && r != nil && ir.IsConst(r, constant.Int) && constant.Compare(l.Val(), token.GTR, r.Val()) {
- base.Errorf("len larger than cap in make(%v)", t)
- n.SetType(nil)
- return n
- }
- nn = ir.NewMakeExpr(n.Pos(), ir.OMAKESLICE, l, r)
-
- case types.TMAP:
- if i < len(args) {
- l = args[i]
- i++
- l = typecheck(l, ctxExpr)
- l = defaultlit(l, types.Types[types.TINT])
- if l.Type() == nil {
- n.SetType(nil)
- return n
- }
- if !checkmake(t, "size", &l) {
- n.SetType(nil)
- return n
- }
- } else {
- l = ir.NewInt(0)
- }
- nn = ir.NewMakeExpr(n.Pos(), ir.OMAKEMAP, l, nil)
- nn.SetEsc(n.Esc())
-
- case types.TCHAN:
- l = nil
- if i < len(args) {
- l = args[i]
- i++
- l = typecheck(l, ctxExpr)
- l = defaultlit(l, types.Types[types.TINT])
- if l.Type() == nil {
- n.SetType(nil)
- return n
- }
- if !checkmake(t, "buffer", &l) {
- n.SetType(nil)
- return n
- }
- } else {
- l = ir.NewInt(0)
- }
- nn = ir.NewMakeExpr(n.Pos(), ir.OMAKECHAN, l, nil)
- }
-
- if i < len(args) {
- base.Errorf("too many arguments to make(%v)", t)
- n.SetType(nil)
- return n
- }
-
- nn.SetType(t)
- return nn
-
- case ir.ONEW:
- n := n.(*ir.UnaryExpr)
- if n.X == nil {
- // Fatalf because the OCALL above checked for us,
- // so this must be an internally-generated mistake.
- base.Fatalf("missing argument to new")
- }
- l := n.X
- l = typecheck(l, ctxType)
- t := l.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
- n.X = l
- n.SetType(types.NewPtr(t))
- return n
-
- case ir.OPRINT, ir.OPRINTN:
- n := n.(*ir.CallExpr)
- typecheckargs(n)
- ls := n.Args
- for i1, n1 := range ls {
- // Special case for print: int constant is int64, not int.
- if ir.IsConst(n1, constant.Int) {
- ls[i1] = defaultlit(ls[i1], types.Types[types.TINT64])
- } else {
- ls[i1] = defaultlit(ls[i1], nil)
- }
- }
- return n
-
- case ir.OPANIC:
- n := n.(*ir.UnaryExpr)
- n.X = typecheck(n.X, ctxExpr)
- n.X = defaultlit(n.X, types.Types[types.TINTER])
- if n.X.Type() == nil {
- n.SetType(nil)
- return n
- }
- return n
-
- case ir.ORECOVER:
- n := n.(*ir.CallExpr)
- if len(n.Args) != 0 {
- base.Errorf("too many arguments to recover")
- n.SetType(nil)
- return n
- }
-
- n.SetType(types.Types[types.TINTER])
- return n
-
- case ir.OCLOSURE:
- n := n.(*ir.ClosureExpr)
- typecheckclosure(n, top)
- if n.Type() == nil {
- return n
- }
- return n
-
- case ir.OITAB:
- n := n.(*ir.UnaryExpr)
- n.X = typecheck(n.X, ctxExpr)
- t := n.X.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
- if !t.IsInterface() {
- base.Fatalf("OITAB of %v", t)
- }
- n.SetType(types.NewPtr(types.Types[types.TUINTPTR]))
- return n
-
- case ir.OIDATA:
- // Whoever creates the OIDATA node must know a priori the concrete type at that moment,
- // usually by just having checked the OITAB.
- n := n.(*ir.UnaryExpr)
- base.Fatalf("cannot typecheck interface data %v", n)
- panic("unreachable")
-
- case ir.OSPTR:
- n := n.(*ir.UnaryExpr)
- n.X = typecheck(n.X, ctxExpr)
- t := n.X.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
- if !t.IsSlice() && !t.IsString() {
- base.Fatalf("OSPTR of %v", t)
- }
- if t.IsString() {
- n.SetType(types.NewPtr(types.Types[types.TUINT8]))
- } else {
- n.SetType(types.NewPtr(t.Elem()))
- }
- return n
-
- case ir.OCLOSUREREAD:
- return n
-
- case ir.OCFUNC:
- n := n.(*ir.UnaryExpr)
- n.X = typecheck(n.X, ctxExpr)
- n.SetType(types.Types[types.TUINTPTR])
- return n
-
- case ir.OCONVNOP:
- n := n.(*ir.ConvExpr)
- n.X = typecheck(n.X, ctxExpr)
- return n
-
- // statements
- case ir.OAS:
- n := n.(*ir.AssignStmt)
- typecheckas(n)
-
- // Code that creates temps does not bother to set defn, so do it here.
- if n.X.Op() == ir.ONAME && ir.IsAutoTmp(n.X) {
- n.X.Name().Defn = n
- }
- return n
-
- case ir.OAS2:
- typecheckas2(n.(*ir.AssignListStmt))
- return n
-
- case ir.OBREAK,
- ir.OCONTINUE,
- ir.ODCL,
- ir.OGOTO,
- ir.OFALL,
- ir.OVARKILL,
- ir.OVARLIVE:
- return n
-
- case ir.OBLOCK:
- n := n.(*ir.BlockStmt)
- typecheckslice(n.List, ctxStmt)
- return n
-
- case ir.OLABEL:
- decldepth++
- if n.Sym().IsBlank() {
- // Empty identifier is valid but useless.
- // Eliminate now to simplify life later.
- // See issues 7538, 11589, 11593.
- n = ir.NewBlockStmt(n.Pos(), nil)
- }
- return n
-
- case ir.ODEFER, ir.OGO:
- n := n.(*ir.GoDeferStmt)
- n.Call = typecheck(n.Call, ctxStmt|ctxExpr)
- if !n.Call.Diag() {
- checkdefergo(n)
- }
- return n
-
- case ir.OFOR, ir.OFORUNTIL:
- n := n.(*ir.ForStmt)
- typecheckslice(n.Init(), ctxStmt)
- decldepth++
- n.Cond = typecheck(n.Cond, ctxExpr)
- n.Cond = defaultlit(n.Cond, nil)
- if n.Cond != nil {
- t := n.Cond.Type()
- if t != nil && !t.IsBoolean() {
- base.Errorf("non-bool %L used as for condition", n.Cond)
- }
- }
- n.Post = typecheck(n.Post, ctxStmt)
- if n.Op() == ir.OFORUNTIL {
- typecheckslice(n.Late, ctxStmt)
- }
- typecheckslice(n.Body, ctxStmt)
- decldepth--
- return n
-
- case ir.OIF:
- n := n.(*ir.IfStmt)
- typecheckslice(n.Init(), ctxStmt)
- n.Cond = typecheck(n.Cond, ctxExpr)
- n.Cond = defaultlit(n.Cond, nil)
- if n.Cond != nil {
- t := n.Cond.Type()
- if t != nil && !t.IsBoolean() {
- base.Errorf("non-bool %L used as if condition", n.Cond)
- }
- }
- typecheckslice(n.Body, ctxStmt)
- typecheckslice(n.Else, ctxStmt)
- return n
-
- case ir.ORETURN:
- n := n.(*ir.ReturnStmt)
- typecheckargs(n)
- if ir.CurFunc == nil {
- base.Errorf("return outside function")
- n.SetType(nil)
- return n
- }
-
- if ir.HasNamedResults(ir.CurFunc) && len(n.Results) == 0 {
- return n
- }
- typecheckaste(ir.ORETURN, nil, false, ir.CurFunc.Type().Results(), n.Results, func() string { return "return argument" })
- return n
-
- case ir.ORETJMP:
- n := n.(*ir.BranchStmt)
- return n
-
- case ir.OSELECT:
- typecheckselect(n.(*ir.SelectStmt))
- return n
-
- case ir.OSWITCH:
- typecheckswitch(n.(*ir.SwitchStmt))
- return n
-
- case ir.ORANGE:
- typecheckrange(n.(*ir.RangeStmt))
- return n
-
- case ir.OTYPESW:
- n := n.(*ir.TypeSwitchGuard)
- base.Errorf("use of .(type) outside type switch")
- n.SetType(nil)
- return n
-
- case ir.ODCLFUNC:
- typecheckfunc(n.(*ir.Func))
- return n
-
- case ir.ODCLCONST:
- n := n.(*ir.Decl)
- n.X = typecheck(n.X, ctxExpr)
- return n
-
- case ir.ODCLTYPE:
- n := n.(*ir.Decl)
- n.X = typecheck(n.X, ctxType)
- types.CheckSize(n.X.Type())
- return n
- }
-
- // No return n here!
- // Individual cases can type-assert n, introducing a new one.
- // Each must execute its own return n.
-}
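
As an illustration of the OLSH/ORSH arm above (a sketch, not part of this CL): a signed shift count is accepted only when the language version is go1.13 or later; for earlier versions the typechecker reports the signed-shift-count error and the count must be converted to an unsigned type.

	package p

	func shifts(x uint, n int8) uint {
		// Accepted as of go1.13; before that the count would need a
		// conversion such as uint(n).
		return x << n
	}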
-
-func typecheckargs(n ir.Node) {
- var list []ir.Node
- switch n := n.(type) {
- default:
- base.Fatalf("typecheckargs %+v", n.Op())
- case *ir.CallExpr:
- list = n.Args
- if n.IsDDD {
- typecheckslice(list, ctxExpr)
- return
- }
- case *ir.ReturnStmt:
- list = n.Results
- }
- if len(list) != 1 {
- typecheckslice(list, ctxExpr)
- return
- }
-
- typecheckslice(list, ctxExpr|ctxMultiOK)
- t := list[0].Type()
- if t == nil || !t.IsFuncArgStruct() {
- return
- }
-
- // Rewrite f(g()) into t1, t2, ... = g(); f(t1, t2, ...).
-
- // Save n as n.Orig for fmt.go.
- if ir.Orig(n) == n {
- n.(ir.OrigNode).SetOrig(ir.SepCopy(n))
- }
-
- as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- as.Rhs.Append(list...)
-
- // If we're outside of function context, then this call will
- // be executed during the generated init function. However,
- // init.go hasn't yet created it. Instead, associate the
- // temporary variables with initTodo for now, and init.go
- // will reassociate them later when it's appropriate.
- static := ir.CurFunc == nil
- if static {
- ir.CurFunc = initTodo
- }
- list = nil
- for _, f := range t.FieldSlice() {
- t := temp(f.Type)
- as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, t))
- as.Lhs.Append(t)
- list = append(list, t)
- }
- if static {
- ir.CurFunc = nil
- }
-
- switch n := n.(type) {
- case *ir.CallExpr:
- n.Args.Set(list)
- case *ir.ReturnStmt:
- n.Results.Set(list)
- }
-
- n.PtrInit().Append(typecheck(as, ctxStmt))
-}
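
A minimal sketch (not from this CL) of the call shape that typecheckargs rewrites: a single multi-value call used as the whole argument or result list is split into compiler temporaries before the outer call is checked.

	package p

	func g() (int, string) { return 1, "x" }
	func f(int, string)    {}

	func use() {
		// Typechecked roughly as: t1, t2 := g(); f(t1, t2)
		f(g())
	}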
-
-func checksliceindex(l ir.Node, r ir.Node, tp *types.Type) bool {
- t := r.Type()
- if t == nil {
- return false
- }
- if !t.IsInteger() {
- base.Errorf("invalid slice index %v (type %v)", r, t)
- return false
- }
-
- if r.Op() == ir.OLITERAL {
- x := r.Val()
- if constant.Sign(x) < 0 {
- base.Errorf("invalid slice index %v (index must be non-negative)", r)
- return false
- } else if tp != nil && tp.NumElem() >= 0 && constant.Compare(x, token.GTR, constant.MakeInt64(tp.NumElem())) {
- base.Errorf("invalid slice index %v (out of bounds for %d-element array)", r, tp.NumElem())
- return false
- } else if ir.IsConst(l, constant.String) && constant.Compare(x, token.GTR, constant.MakeInt64(int64(len(ir.StringVal(l))))) {
- base.Errorf("invalid slice index %v (out of bounds for %d-byte string)", r, len(ir.StringVal(l)))
- return false
- } else if ir.ConstOverflow(x, types.Types[types.TINT]) {
- base.Errorf("invalid slice index %v (index too large)", r)
- return false
- }
- }
-
- return true
-}
-
-func checksliceconst(lo ir.Node, hi ir.Node) bool {
- if lo != nil && hi != nil && lo.Op() == ir.OLITERAL && hi.Op() == ir.OLITERAL && constant.Compare(lo.Val(), token.GTR, hi.Val()) {
- base.Errorf("invalid slice index: %v > %v", lo, hi)
- return false
- }
-
- return true
-}
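
For illustration only (not part of this CL), the constant cases that checksliceindex and checksliceconst reject, using the error texts above:

	package p

	var a [4]int
	var ok = a[1:3] // within bounds, low <= high

	// The following would be rejected at compile time:
	//	a[-1:2]  invalid slice index -1 (index must be non-negative)
	//	a[1:9]   invalid slice index 9 (out of bounds for 4-element array)
	//	a[3:1]   invalid slice index: 3 > 1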
-
-func checkdefergo(n *ir.GoDeferStmt) {
- what := "defer"
- if n.Op() == ir.OGO {
- what = "go"
- }
-
- switch n.Call.Op() {
- // ok
- case ir.OCALLINTER,
- ir.OCALLMETH,
- ir.OCALLFUNC,
- ir.OCLOSE,
- ir.OCOPY,
- ir.ODELETE,
- ir.OPANIC,
- ir.OPRINT,
- ir.OPRINTN,
- ir.ORECOVER:
- return
-
- case ir.OAPPEND,
- ir.OCAP,
- ir.OCOMPLEX,
- ir.OIMAG,
- ir.OLEN,
- ir.OMAKE,
- ir.OMAKESLICE,
- ir.OMAKECHAN,
- ir.OMAKEMAP,
- ir.ONEW,
- ir.OREAL,
- ir.OLITERAL: // conversion or unsafe.Alignof, Offsetof, Sizeof
- if orig := ir.Orig(n.Call); orig.Op() == ir.OCONV {
- break
- }
- base.ErrorfAt(n.Pos(), "%s discards result of %v", what, n.Call)
- return
- }
-
- // type is broken or missing, most likely a method call on a broken type
- // we will warn about the broken type elsewhere. no need to emit a potentially confusing error
- if n.Call.Type() == nil || n.Call.Type().Broke() {
- return
- }
-
- if !n.Diag() {
- // The syntax made sure it was a call, so this must be
- // a conversion.
- n.SetDiag(true)
- base.ErrorfAt(n.Pos(), "%s requires function call, not conversion", what)
- }
-}
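
A hedged sketch (not part of this CL) of what checkdefergo accepts and rejects: builtins in the first case list are fine after defer/go, while value-producing builtins in the second list trigger the "discards result" error.

	package p

	func calls(ch chan int, s []int) {
		defer close(ch) // ok: close is in the allowed list
		go copy(s, s)   // ok: copy is allowed even though its result is dropped
		// defer len(s) // rejected: "defer discards result of len(s)"
	}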
-
-// The result of implicitstar MUST be assigned back to n, e.g.
-// n.Left = implicitstar(n.Left)
-func implicitstar(n ir.Node) ir.Node {
- // insert implicit * if needed for fixed array
- t := n.Type()
- if t == nil || !t.IsPtr() {
- return n
- }
- t = t.Elem()
- if t == nil {
- return n
- }
- if !t.IsArray() {
- return n
- }
- star := ir.NewStarExpr(base.Pos, n)
- star.SetImplicit(true)
- return typecheck(star, ctxExpr)
-}
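
A small example (not from this CL) of the implicit dereference that implicitstar inserts when a pointer to an array is indexed or measured:

	package p

	func first(p *[8]byte) int {
		// Both uses are treated as going through *p: len(*p) and (*p)[0].
		return len(p) + int(p[0])
	}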
-
-func needOneArg(n *ir.CallExpr, f string, args ...interface{}) (ir.Node, bool) {
- if len(n.Args) == 0 {
- p := fmt.Sprintf(f, args...)
- base.Errorf("missing argument to %s: %v", p, n)
- return nil, false
- }
-
- if len(n.Args) > 1 {
- p := fmt.Sprintf(f, args...)
- base.Errorf("too many arguments to %s: %v", p, n)
- return n.Args[0], false
- }
-
- return n.Args[0], true
-}
-
-func needTwoArgs(n *ir.CallExpr) (ir.Node, ir.Node, bool) {
- if len(n.Args) != 2 {
- if len(n.Args) < 2 {
- base.Errorf("not enough arguments in call to %v", n)
- } else {
- base.Errorf("too many arguments in call to %v", n)
- }
- return nil, nil, false
- }
- return n.Args[0], n.Args[1], true
-}
-
-func lookdot1(errnode ir.Node, s *types.Sym, t *types.Type, fs *types.Fields, dostrcmp int) *types.Field {
- var r *types.Field
- for _, f := range fs.Slice() {
- if dostrcmp != 0 && f.Sym.Name == s.Name {
- return f
- }
- if dostrcmp == 2 && strings.EqualFold(f.Sym.Name, s.Name) {
- return f
- }
- if f.Sym != s {
- continue
- }
- if r != nil {
- if errnode != nil {
- base.Errorf("ambiguous selector %v", errnode)
- } else if t.IsPtr() {
- base.Errorf("ambiguous selector (%v).%v", t, s)
- } else {
- base.Errorf("ambiguous selector %v.%v", t, s)
- }
- break
- }
-
- r = f
- }
-
- return r
-}
-
-// typecheckMethodExpr checks selector expressions (ODOT) where the
-// base expression is a type expression (OTYPE).
-func typecheckMethodExpr(n *ir.SelectorExpr) (res ir.Node) {
- if base.EnableTrace && base.Flag.LowerT {
- defer tracePrint("typecheckMethodExpr", n)(&res)
- }
-
- t := n.X.Type()
-
- // Compute the method set for t.
- var ms *types.Fields
- if t.IsInterface() {
- ms = t.Fields()
- } else {
- mt := types.ReceiverBaseType(t)
- if mt == nil {
- base.Errorf("%v undefined (type %v has no method %v)", n, t, n.Sel)
- n.SetType(nil)
- return n
- }
- expandmeth(mt)
- ms = mt.AllMethods()
-
- // The method expression T.m requires a wrapper when T
- // is different from m's declared receiver type. We
- // normally generate these wrappers while writing out
- // runtime type descriptors, which is always done for
- // types declared at package scope. However, we need
- // to make sure to generate wrappers for anonymous
- // receiver types too.
- if mt.Sym() == nil {
- NeedRuntimeType(t)
- }
- }
-
- s := n.Sel
- m := lookdot1(n, s, t, ms, 0)
- if m == nil {
- if lookdot1(n, s, t, ms, 1) != nil {
- base.Errorf("%v undefined (cannot refer to unexported method %v)", n, s)
- } else if _, ambig := dotpath(s, t, nil, false); ambig {
- base.Errorf("%v undefined (ambiguous selector)", n) // method or field
- } else {
- base.Errorf("%v undefined (type %v has no method %v)", n, t, s)
- }
- n.SetType(nil)
- return n
- }
-
- if !types.IsMethodApplicable(t, m) {
- base.Errorf("invalid method expression %v (needs pointer receiver: (*%v).%S)", n, t, s)
- n.SetType(nil)
- return n
- }
-
- me := ir.NewMethodExpr(n.Pos(), n.X.Type(), m)
- me.SetType(methodfunc(m.Type, n.X.Type()))
- f := NewName(ir.MethodSym(t, m.Sym))
- f.Class_ = ir.PFUNC
- f.SetType(me.Type())
- me.FuncName_ = f
-
- // Issue 25065. Make sure that we emit the symbol for a local method.
- if base.Ctxt.Flag_dynlink && !inimport && (t.Sym() == nil || t.Sym().Pkg == types.LocalPkg) {
- NeedFuncSym(me.FuncName_.Sym())
- }
-
- return me
-}
-
-func derefall(t *types.Type) *types.Type {
- for t != nil && t.IsPtr() {
- t = t.Elem()
- }
- return t
-}
-
-func lookdot(n *ir.SelectorExpr, t *types.Type, dostrcmp int) *types.Field {
- s := n.Sel
-
- types.CalcSize(t)
- var f1 *types.Field
- if t.IsStruct() || t.IsInterface() {
- f1 = lookdot1(n, s, t, t.Fields(), dostrcmp)
- }
-
- var f2 *types.Field
- if n.X.Type() == t || n.X.Type().Sym() == nil {
- mt := types.ReceiverBaseType(t)
- if mt != nil {
- f2 = lookdot1(n, s, mt, mt.Methods(), dostrcmp)
- }
- }
-
- if f1 != nil {
- if dostrcmp > 1 || f1.Broke() {
- // Already in the process of diagnosing an error.
- return f1
- }
- if f2 != nil {
- base.Errorf("%v is both field and method", n.Sel)
- }
- if f1.Offset == types.BADWIDTH {
- base.Fatalf("lookdot badwidth %v %p", f1, f1)
- }
- n.Offset = f1.Offset
- n.SetType(f1.Type)
- if t.IsInterface() {
- if n.X.Type().IsPtr() {
- star := ir.NewStarExpr(base.Pos, n.X)
- star.SetImplicit(true)
- n.X = typecheck(star, ctxExpr)
- }
-
- n.SetOp(ir.ODOTINTER)
- }
- n.Selection = f1
- return f1
- }
-
- if f2 != nil {
- if dostrcmp > 1 {
- // Already in the process of diagnosing an error.
- return f2
- }
- tt := n.X.Type()
- types.CalcSize(tt)
- rcvr := f2.Type.Recv().Type
- if !types.Identical(rcvr, tt) {
- if rcvr.IsPtr() && types.Identical(rcvr.Elem(), tt) {
- checklvalue(n.X, "call pointer method on")
- addr := nodAddr(n.X)
- addr.SetImplicit(true)
- n.X = typecheck(addr, ctxType|ctxExpr)
- } else if tt.IsPtr() && (!rcvr.IsPtr() || rcvr.IsPtr() && rcvr.Elem().NotInHeap()) && types.Identical(tt.Elem(), rcvr) {
- star := ir.NewStarExpr(base.Pos, n.X)
- star.SetImplicit(true)
- n.X = typecheck(star, ctxType|ctxExpr)
- } else if tt.IsPtr() && tt.Elem().IsPtr() && types.Identical(derefall(tt), derefall(rcvr)) {
- base.Errorf("calling method %v with receiver %L requires explicit dereference", n.Sel, n.X)
- for tt.IsPtr() {
- // Stop one level early for method with pointer receiver.
- if rcvr.IsPtr() && !tt.Elem().IsPtr() {
- break
- }
- star := ir.NewStarExpr(base.Pos, n.X)
- star.SetImplicit(true)
- n.X = typecheck(star, ctxType|ctxExpr)
- tt = tt.Elem()
- }
- } else {
- base.Fatalf("method mismatch: %v for %v", rcvr, tt)
- }
- }
-
- implicit, ll := n.Implicit(), n.X
- for ll != nil && (ll.Op() == ir.ODOT || ll.Op() == ir.ODOTPTR || ll.Op() == ir.ODEREF) {
- switch l := ll.(type) {
- case *ir.SelectorExpr:
- implicit, ll = l.Implicit(), l.X
- case *ir.StarExpr:
- implicit, ll = l.Implicit(), l.X
- }
- }
- if implicit && ll.Type().IsPtr() && ll.Type().Sym() != nil && ll.Type().Sym().Def != nil && ir.AsNode(ll.Type().Sym().Def).Op() == ir.OTYPE {
- // It is invalid to automatically dereference a named pointer type when selecting a method.
- // Make n.Left == ll to clarify error message.
- n.X = ll
- return nil
- }
-
- n.Sel = ir.MethodSym(n.X.Type(), f2.Sym)
- n.Offset = f2.Offset
- n.SetType(f2.Type)
- n.SetOp(ir.ODOTMETH)
- n.Selection = f2
-
- return f2
- }
-
- return nil
-}
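
For illustration (not part of this CL), the implicit address-of and dereference that lookdot inserts so a method call works across pointer and value forms, provided the receiver is addressable:

	package p

	type Counter struct{ n int }

	func (c *Counter) Inc() { c.n++ }

	func use() {
		var v Counter
		v.Inc() // rewritten to (&v).Inc(); v must be addressable
		p := &v
		p.Inc() // receiver already has the right form
		_ = p.n // field selection through a pointer becomes ODOTPTR
	}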
-
-func nokeys(l ir.Nodes) bool {
- for _, n := range l {
- if n.Op() == ir.OKEY || n.Op() == ir.OSTRUCTKEY {
- return false
- }
- }
- return true
-}
-
-func hasddd(t *types.Type) bool {
- for _, tl := range t.Fields().Slice() {
- if tl.IsDDD() {
- return true
- }
- }
-
- return false
-}
-
-// typecheck assignment: type list = expression list
-func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl ir.Nodes, desc func() string) {
- var t *types.Type
- var i int
-
- lno := base.Pos
- defer func() { base.Pos = lno }()
-
- if tstruct.Broke() {
- return
- }
-
- var n ir.Node
- if len(nl) == 1 {
- n = nl[0]
- }
-
- n1 := tstruct.NumFields()
- n2 := len(nl)
- if !hasddd(tstruct) {
- if n2 > n1 {
- goto toomany
- }
- if n2 < n1 {
- goto notenough
- }
- } else {
- if !isddd {
- if n2 < n1-1 {
- goto notenough
- }
- } else {
- if n2 > n1 {
- goto toomany
- }
- if n2 < n1 {
- goto notenough
- }
- }
- }
-
- i = 0
- for _, tl := range tstruct.Fields().Slice() {
- t = tl.Type
- if tl.IsDDD() {
- if isddd {
- if i >= len(nl) {
- goto notenough
- }
- if len(nl)-i > 1 {
- goto toomany
- }
- n = nl[i]
- ir.SetPos(n)
- if n.Type() != nil {
- nl[i] = assignconvfn(n, t, desc)
- }
- return
- }
-
- // TODO(mdempsky): Make into ... call with implicit slice.
- for ; i < len(nl); i++ {
- n = nl[i]
- ir.SetPos(n)
- if n.Type() != nil {
- nl[i] = assignconvfn(n, t.Elem(), desc)
- }
- }
- return
- }
-
- if i >= len(nl) {
- goto notenough
- }
- n = nl[i]
- ir.SetPos(n)
- if n.Type() != nil {
- nl[i] = assignconvfn(n, t, desc)
- }
- i++
- }
-
- if i < len(nl) {
- goto toomany
- }
- if isddd {
- if call != nil {
- base.Errorf("invalid use of ... in call to %v", call)
- } else {
- base.Errorf("invalid use of ... in %v", op)
- }
- }
- return
-
-notenough:
- if n == nil || (!n.Diag() && n.Type() != nil) {
- details := errorDetails(nl, tstruct, isddd)
- if call != nil {
- // call is the expression being called, not the overall call.
- // Method expressions have the form T.M, and the compiler has
- // rewritten those to ONAME nodes but left T in Left.
- if call.Op() == ir.OMETHEXPR {
- call := call.(*ir.MethodExpr)
- base.Errorf("not enough arguments in call to method expression %v%s", call, details)
- } else {
- base.Errorf("not enough arguments in call to %v%s", call, details)
- }
- } else {
- base.Errorf("not enough arguments to %v%s", op, details)
- }
- if n != nil {
- n.SetDiag(true)
- }
- }
- return
-
-toomany:
- details := errorDetails(nl, tstruct, isddd)
- if call != nil {
- base.Errorf("too many arguments in call to %v%s", call, details)
- } else {
- base.Errorf("too many arguments to %v%s", op, details)
- }
-}
-
-func errorDetails(nl ir.Nodes, tstruct *types.Type, isddd bool) string {
- // If we don't know any type at a call site, let's suppress any return
- // message signatures. See Issue https://golang.org/issues/19012.
- if tstruct == nil {
- return ""
- }
- // If any node has an unknown type, suppress it as well
- for _, n := range nl {
- if n.Type() == nil {
- return ""
- }
- }
- return fmt.Sprintf("\n\thave %s\n\twant %v", fmtSignature(nl, isddd), tstruct)
-}
-
-// sigrepr is a type's representation to the outside world,
-// in string representations of return signatures
- // e.g. in error messages about wrong arguments to return.
-func sigrepr(t *types.Type, isddd bool) string {
- switch t {
- case types.UntypedString:
- return "string"
- case types.UntypedBool:
- return "bool"
- }
-
- if t.Kind() == types.TIDEAL {
- // "untyped number" is not commonly used
- // outside of the compiler, so let's use "number".
- // TODO(mdempsky): Revisit this.
- return "number"
- }
-
- // Turn []T... argument to ...T for clearer error message.
- if isddd {
- if !t.IsSlice() {
- base.Fatalf("bad type for ... argument: %v", t)
- }
- return "..." + t.Elem().String()
- }
- return t.String()
-}
-
- // fmtSignature returns the signature of the types at the call or return.
-func fmtSignature(nl ir.Nodes, isddd bool) string {
- if len(nl) < 1 {
- return "()"
- }
-
- var typeStrings []string
- for i, n := range nl {
- isdddArg := isddd && i == len(nl)-1
- typeStrings = append(typeStrings, sigrepr(n.Type(), isdddArg))
- }
-
- return fmt.Sprintf("(%s)", strings.Join(typeStrings, ", "))
-}
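
An illustrative sketch (not part of this CL) of the have/want detail that errorDetails and fmtSignature attach to a bad call; "number" is sigrepr's spelling of an untyped constant.

	package p

	func h(a int, b string) {}

	func caller() {
		h(0, "") // ok
		// h(1) would report:
		//	not enough arguments in call to h
		//		have (number)
		//		want (int, string)
	}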
-
-// type check composite
-func fielddup(name string, hash map[string]bool) {
- if hash[name] {
- base.Errorf("duplicate field name in struct literal: %s", name)
- return
- }
- hash[name] = true
-}
-
-// iscomptype reports whether type t is a composite literal type.
-func iscomptype(t *types.Type) bool {
- switch t.Kind() {
- case types.TARRAY, types.TSLICE, types.TSTRUCT, types.TMAP:
- return true
- default:
- return false
- }
-}
-
-// pushtype adds elided type information for composite literals if
-// appropriate, and returns the resulting expression.
-func pushtype(nn ir.Node, t *types.Type) ir.Node {
- if nn == nil || nn.Op() != ir.OCOMPLIT {
- return nn
- }
- n := nn.(*ir.CompLitExpr)
- if n.Ntype != nil {
- return n
- }
-
- switch {
- case iscomptype(t):
- // For T, return T{...}.
- n.Ntype = ir.TypeNode(t)
-
- case t.IsPtr() && iscomptype(t.Elem()):
- // For *T, return &T{...}.
- n.Ntype = ir.TypeNode(t.Elem())
-
- addr := nodAddrAt(n.Pos(), n)
- addr.SetImplicit(true)
- return addr
- }
- return n
-}
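
A minimal example (not from this CL) of the elided composite-literal types that pushtype supplies; the pointer case corresponds to the &T{...} branch above:

	package p

	type T struct{ X int }

	var a = []T{{X: 1}, {X: 2}}        // element type T pushed into each {...}
	var m = map[string]*T{"k": {X: 3}} // *T element: literal becomes &T{X: 3}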
-
-// The result of typecheckcomplit MUST be assigned back to n, e.g.
-// n.Left = typecheckcomplit(n.Left)
-func typecheckcomplit(n *ir.CompLitExpr) (res ir.Node) {
- if base.EnableTrace && base.Flag.LowerT {
- defer tracePrint("typecheckcomplit", n)(&res)
- }
-
- lno := base.Pos
- defer func() {
- base.Pos = lno
- }()
-
- if n.Ntype == nil {
- base.ErrorfAt(n.Pos(), "missing type in composite literal")
- n.SetType(nil)
- return n
- }
-
- // Save original node (including n.Right)
- n.SetOrig(ir.Copy(n))
-
- ir.SetPos(n.Ntype)
-
- // Need to handle [...]T arrays specially.
- if array, ok := n.Ntype.(*ir.ArrayType); ok && array.Elem != nil && array.Len == nil {
- array.Elem = typecheck(array.Elem, ctxType)
- elemType := array.Elem.Type()
- if elemType == nil {
- n.SetType(nil)
- return n
- }
- length := typecheckarraylit(elemType, -1, n.List, "array literal")
- n.SetOp(ir.OARRAYLIT)
- n.SetType(types.NewArray(elemType, length))
- n.Ntype = nil
- return n
- }
-
- n.Ntype = ir.Node(typecheck(n.Ntype, ctxType)).(ir.Ntype)
- t := n.Ntype.Type()
- if t == nil {
- n.SetType(nil)
- return n
- }
- n.SetType(t)
-
- switch t.Kind() {
- default:
- base.Errorf("invalid composite literal type %v", t)
- n.SetType(nil)
-
- case types.TARRAY:
- typecheckarraylit(t.Elem(), t.NumElem(), n.List, "array literal")
- n.SetOp(ir.OARRAYLIT)
- n.Ntype = nil
-
- case types.TSLICE:
- length := typecheckarraylit(t.Elem(), -1, n.List, "slice literal")
- n.SetOp(ir.OSLICELIT)
- n.Ntype = nil
- n.Len = length
-
- case types.TMAP:
- var cs constSet
- for i3, l := range n.List {
- ir.SetPos(l)
- if l.Op() != ir.OKEY {
- n.List[i3] = typecheck(l, ctxExpr)
- base.Errorf("missing key in map literal")
- continue
- }
- l := l.(*ir.KeyExpr)
-
- r := l.Key
- r = pushtype(r, t.Key())
- r = typecheck(r, ctxExpr)
- l.Key = assignconv(r, t.Key(), "map key")
- cs.add(base.Pos, l.Key, "key", "map literal")
-
- r = l.Value
- r = pushtype(r, t.Elem())
- r = typecheck(r, ctxExpr)
- l.Value = assignconv(r, t.Elem(), "map value")
- }
-
- n.SetOp(ir.OMAPLIT)
- n.Ntype = nil
-
- case types.TSTRUCT:
- // Need valid field offsets for Xoffset below.
- types.CalcSize(t)
-
- errored := false
- if len(n.List) != 0 && nokeys(n.List) {
- // simple list of variables
- ls := n.List
- for i, n1 := range ls {
- ir.SetPos(n1)
- n1 = typecheck(n1, ctxExpr)
- ls[i] = n1
- if i >= t.NumFields() {
- if !errored {
- base.Errorf("too many values in %v", n)
- errored = true
- }
- continue
- }
-
- f := t.Field(i)
- s := f.Sym
- if s != nil && !types.IsExported(s.Name) && s.Pkg != types.LocalPkg {
- base.Errorf("implicit assignment of unexported field '%s' in %v literal", s.Name, t)
- }
- // No pushtype allowed here. Must name fields for that.
- n1 = assignconv(n1, f.Type, "field value")
- sk := ir.NewStructKeyExpr(base.Pos, f.Sym, n1)
- sk.Offset = f.Offset
- ls[i] = sk
- }
- if len(ls) < t.NumFields() {
- base.Errorf("too few values in %v", n)
- }
- } else {
- hash := make(map[string]bool)
-
- // keyed list
- ls := n.List
- for i, l := range ls {
- ir.SetPos(l)
-
- if l.Op() == ir.OKEY {
- kv := l.(*ir.KeyExpr)
- key := kv.Key
-
- // Sym might have resolved to name in other top-level
- // package, because of import dot. Redirect to correct sym
- // before we do the lookup.
- s := key.Sym()
- if id, ok := key.(*ir.Ident); ok && dotImportRefs[id] != nil {
- s = lookup(s.Name)
- }
-
- // An OXDOT uses the Sym field to hold
- // the field to the right of the dot,
- // so s will be non-nil, but an OXDOT
- // is never a valid struct literal key.
- if s == nil || s.Pkg != types.LocalPkg || key.Op() == ir.OXDOT || s.IsBlank() {
- base.Errorf("invalid field name %v in struct initializer", key)
- continue
- }
-
- l = ir.NewStructKeyExpr(l.Pos(), s, kv.Value)
- ls[i] = l
- }
-
- if l.Op() != ir.OSTRUCTKEY {
- if !errored {
- base.Errorf("mixture of field:value and value initializers")
- errored = true
- }
- ls[i] = typecheck(ls[i], ctxExpr)
- continue
- }
- l := l.(*ir.StructKeyExpr)
-
- f := lookdot1(nil, l.Field, t, t.Fields(), 0)
- if f == nil {
- if ci := lookdot1(nil, l.Field, t, t.Fields(), 2); ci != nil { // Case-insensitive lookup.
- if visible(ci.Sym) {
- base.Errorf("unknown field '%v' in struct literal of type %v (but does have %v)", l.Field, t, ci.Sym)
- } else if nonexported(l.Field) && l.Field.Name == ci.Sym.Name { // Ensure exactness before the suggestion.
- base.Errorf("cannot refer to unexported field '%v' in struct literal of type %v", l.Field, t)
- } else {
- base.Errorf("unknown field '%v' in struct literal of type %v", l.Field, t)
- }
- continue
- }
- var f *types.Field
- p, _ := dotpath(l.Field, t, &f, true)
- if p == nil || f.IsMethod() {
- base.Errorf("unknown field '%v' in struct literal of type %v", l.Field, t)
- continue
- }
- // dotpath returns the parent embedded types in reverse order.
- var ep []string
- for ei := len(p) - 1; ei >= 0; ei-- {
- ep = append(ep, p[ei].field.Sym.Name)
- }
- ep = append(ep, l.Field.Name)
- base.Errorf("cannot use promoted field %v in struct literal of type %v", strings.Join(ep, "."), t)
- continue
- }
- fielddup(f.Sym.Name, hash)
- l.Offset = f.Offset
-
- // No pushtype allowed here. Tried and rejected.
- l.Value = typecheck(l.Value, ctxExpr)
- l.Value = assignconv(l.Value, f.Type, "field value")
- }
- }
-
- n.SetOp(ir.OSTRUCTLIT)
- n.Ntype = nil
- }
-
- return n
-}
-
-// typecheckarraylit type-checks a sequence of slice/array literal elements.
-func typecheckarraylit(elemType *types.Type, bound int64, elts []ir.Node, ctx string) int64 {
- // If there are key/value pairs, create a map to keep seen
- // keys so we can check for duplicate indices.
- var indices map[int64]bool
- for _, elt := range elts {
- if elt.Op() == ir.OKEY {
- indices = make(map[int64]bool)
- break
- }
- }
-
- var key, length int64
- for i, elt := range elts {
- ir.SetPos(elt)
- r := elts[i]
- var kv *ir.KeyExpr
- if elt.Op() == ir.OKEY {
- elt := elt.(*ir.KeyExpr)
- elt.Key = typecheck(elt.Key, ctxExpr)
- key = indexconst(elt.Key)
- if key < 0 {
- if !elt.Key.Diag() {
- if key == -2 {
- base.Errorf("index too large")
- } else {
- base.Errorf("index must be non-negative integer constant")
- }
- elt.Key.SetDiag(true)
- }
- key = -(1 << 30) // stay negative for a while
- }
- kv = elt
- r = elt.Value
- }
-
- r = pushtype(r, elemType)
- r = typecheck(r, ctxExpr)
- r = assignconv(r, elemType, ctx)
- if kv != nil {
- kv.Value = r
- } else {
- elts[i] = r
- }
-
- if key >= 0 {
- if indices != nil {
- if indices[key] {
- base.Errorf("duplicate index in %s: %d", ctx, key)
- } else {
- indices[key] = true
- }
- }
-
- if bound >= 0 && key >= bound {
- base.Errorf("array index %d out of bounds [0:%d]", key, bound)
- bound = -1
- }
- }
-
- key++
- if key > length {
- length = key
- }
- }
-
- return length
-}
-
-// visible reports whether sym is exported or locally defined.
-func visible(sym *types.Sym) bool {
- return sym != nil && (types.IsExported(sym.Name) || sym.Pkg == types.LocalPkg)
-}
-
-// nonexported reports whether sym is an unexported field.
-func nonexported(sym *types.Sym) bool {
- return sym != nil && !types.IsExported(sym.Name)
-}
-
-func checklvalue(n ir.Node, verb string) {
- if !ir.IsAssignable(n) {
- base.Errorf("cannot %s %v", verb, n)
- }
-}
-
-func checkassign(stmt ir.Node, n ir.Node) {
- // Variables declared in ORANGE are assigned on every iteration.
- if !ir.DeclaredBy(n, stmt) || stmt.Op() == ir.ORANGE {
- r := ir.OuterValue(n)
- if r.Op() == ir.ONAME {
- r := r.(*ir.Name)
- r.Name().SetAssigned(true)
- if r.Name().IsClosureVar() {
- r.Name().Defn.Name().SetAssigned(true)
- }
- }
- }
-
- if ir.IsAssignable(n) {
- return
- }
- if n.Op() == ir.OINDEXMAP {
- n := n.(*ir.IndexExpr)
- n.Assigned = true
- return
- }
-
- // have already complained about n being invalid
- if n.Type() == nil {
- return
- }
-
- switch {
- case n.Op() == ir.ODOT && n.(*ir.SelectorExpr).X.Op() == ir.OINDEXMAP:
- base.Errorf("cannot assign to struct field %v in map", n)
- case (n.Op() == ir.OINDEX && n.(*ir.IndexExpr).X.Type().IsString()) || n.Op() == ir.OSLICESTR:
- base.Errorf("cannot assign to %v (strings are immutable)", n)
- case n.Op() == ir.OLITERAL && n.Sym() != nil && ir.IsConstNode(n):
- base.Errorf("cannot assign to %v (declared const)", n)
- default:
- base.Errorf("cannot assign to %v", n)
- }
- n.SetType(nil)
-}
-
-func checkassignlist(stmt ir.Node, l ir.Nodes) {
- for _, n := range l {
- checkassign(stmt, n)
- }
-}
-
-// type check assignment.
-// if this assignment is the definition of a var on the left side,
-// fill in the var's type.
-func typecheckas(n *ir.AssignStmt) {
- if base.EnableTrace && base.Flag.LowerT {
- defer tracePrint("typecheckas", n)(nil)
- }
-
- // delicate little dance.
- // the definition of n may refer to this assignment
- // as its definition, in which case it will call typecheckas.
- // in that case, do not call typecheck back, or it will cycle.
- // if the variable has a type (ntype) then typechecking
- // will not look at defn, so it is okay (and desirable,
- // so that the conversion below happens).
- n.X = resolve(n.X)
-
- if !ir.DeclaredBy(n.X, n) || n.X.Name().Ntype != nil {
- n.X = typecheck(n.X, ctxExpr|ctxAssign)
- }
-
- // Use ctxMultiOK so we can emit an "N variables but M values" error
- // to be consistent with typecheckas2 (#26616).
- n.Y = typecheck(n.Y, ctxExpr|ctxMultiOK)
- checkassign(n, n.X)
- if n.Y != nil && n.Y.Type() != nil {
- if n.Y.Type().IsFuncArgStruct() {
- base.Errorf("assignment mismatch: 1 variable but %v returns %d values", n.Y.(*ir.CallExpr).X, n.Y.Type().NumFields())
- // Multi-value RHS isn't actually valid for OAS; nil out
- // to indicate failed typechecking.
- n.Y.SetType(nil)
- } else if n.X.Type() != nil {
- n.Y = assignconv(n.Y, n.X.Type(), "assignment")
- }
- }
-
- if ir.DeclaredBy(n.X, n) && n.X.Name().Ntype == nil {
- n.Y = defaultlit(n.Y, nil)
- n.X.SetType(n.Y.Type())
- }
-
- // second half of dance.
- // now that right is done, typecheck the left
- // just to get it over with. see dance above.
- n.SetTypecheck(1)
-
- if n.X.Typecheck() == 0 {
- n.X = typecheck(n.X, ctxExpr|ctxAssign)
- }
- if !ir.IsBlank(n.X) {
- types.CheckSize(n.X.Type()) // ensure width is calculated for backend
- }
-}
-
-func checkassignto(src *types.Type, dst ir.Node) {
- if op, why := assignop(src, dst.Type()); op == ir.OXXX {
- base.Errorf("cannot assign %v to %L in multiple assignment%s", src, dst, why)
- return
- }
-}
-
-func typecheckas2(n *ir.AssignListStmt) {
- if base.EnableTrace && base.Flag.LowerT {
- defer tracePrint("typecheckas2", n)(nil)
- }
-
- ls := n.Lhs
- for i1, n1 := range ls {
- // delicate little dance.
- n1 = resolve(n1)
- ls[i1] = n1
-
- if !ir.DeclaredBy(n1, n) || n1.Name().Ntype != nil {
- ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
- }
- }
-
- cl := len(n.Lhs)
- cr := len(n.Rhs)
- if cl > 1 && cr == 1 {
- n.Rhs[0] = typecheck(n.Rhs[0], ctxExpr|ctxMultiOK)
- } else {
- typecheckslice(n.Rhs, ctxExpr)
- }
- checkassignlist(n, n.Lhs)
-
- var l ir.Node
- var r ir.Node
- if cl == cr {
- // easy
- ls := n.Lhs
- rs := n.Rhs
- for il, nl := range ls {
- nr := rs[il]
- if nl.Type() != nil && nr.Type() != nil {
- rs[il] = assignconv(nr, nl.Type(), "assignment")
- }
- if ir.DeclaredBy(nl, n) && nl.Name().Ntype == nil {
- rs[il] = defaultlit(rs[il], nil)
- nl.SetType(rs[il].Type())
- }
- }
-
- goto out
- }
-
- l = n.Lhs[0]
- r = n.Rhs[0]
-
- // x,y,z = f()
- if cr == 1 {
- if r.Type() == nil {
- goto out
- }
- switch r.Op() {
- case ir.OCALLMETH, ir.OCALLINTER, ir.OCALLFUNC:
- if !r.Type().IsFuncArgStruct() {
- break
- }
- cr = r.Type().NumFields()
- if cr != cl {
- goto mismatch
- }
- r.(*ir.CallExpr).Use = ir.CallUseList
- n.SetOp(ir.OAS2FUNC)
- for i, l := range n.Lhs {
- f := r.Type().Field(i)
- if f.Type != nil && l.Type() != nil {
- checkassignto(f.Type, l)
- }
- if ir.DeclaredBy(l, n) && l.Name().Ntype == nil {
- l.SetType(f.Type)
- }
- }
- goto out
- }
- }
-
- // x, ok = y
- if cl == 2 && cr == 1 {
- if r.Type() == nil {
- goto out
- }
- switch r.Op() {
- case ir.OINDEXMAP, ir.ORECV, ir.ODOTTYPE:
- switch r.Op() {
- case ir.OINDEXMAP:
- n.SetOp(ir.OAS2MAPR)
- case ir.ORECV:
- n.SetOp(ir.OAS2RECV)
- case ir.ODOTTYPE:
- r := r.(*ir.TypeAssertExpr)
- n.SetOp(ir.OAS2DOTTYPE)
- r.SetOp(ir.ODOTTYPE2)
- }
- if l.Type() != nil {
- checkassignto(r.Type(), l)
- }
- if ir.DeclaredBy(l, n) {
- l.SetType(r.Type())
- }
- l := n.Lhs[1]
- if l.Type() != nil && !l.Type().IsBoolean() {
- checkassignto(types.Types[types.TBOOL], l)
- }
- if ir.DeclaredBy(l, n) && l.Name().Ntype == nil {
- l.SetType(types.Types[types.TBOOL])
- }
- goto out
- }
- }
-
-mismatch:
- switch r.Op() {
- default:
- base.Errorf("assignment mismatch: %d variables but %d values", cl, cr)
- case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
- r := r.(*ir.CallExpr)
- base.Errorf("assignment mismatch: %d variables but %v returns %d values", cl, r.X, cr)
- }
-
- // second half of dance
-out:
- n.SetTypecheck(1)
- ls = n.Lhs
- for i1, n1 := range ls {
- if n1.Typecheck() == 0 {
- ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
- }
- }
-}
-
-// type check function definition
-// To be called by typecheck, not directly.
-// (Call typecheckFunc instead.)
-func typecheckfunc(n *ir.Func) {
- if base.EnableTrace && base.Flag.LowerT {
- defer tracePrint("typecheckfunc", n)(nil)
- }
-
- for _, ln := range n.Dcl {
- if ln.Op() == ir.ONAME && (ln.Class_ == ir.PPARAM || ln.Class_ == ir.PPARAMOUT) {
- ln.Decldepth = 1
- }
- }
-
- n.Nname = typecheck(n.Nname, ctxExpr|ctxAssign).(*ir.Name)
- t := n.Nname.Type()
- if t == nil {
- return
- }
- n.SetType(t)
- rcvr := t.Recv()
- if rcvr != nil && n.Shortname != nil {
- m := addmethod(n, n.Shortname, t, true, n.Pragma&ir.Nointerface != 0)
- if m == nil {
- return
- }
-
- n.Nname.SetSym(ir.MethodSym(rcvr.Type, n.Shortname))
- declare(n.Nname, ir.PFUNC)
- }
-
- if base.Ctxt.Flag_dynlink && !inimport && n.Nname != nil {
- NeedFuncSym(n.Sym())
- }
-}
-
-// The result of stringtoruneslit MUST be assigned back to n, e.g.
-// n.Left = stringtoruneslit(n.Left)
-func stringtoruneslit(n *ir.ConvExpr) ir.Node {
- if n.X.Op() != ir.OLITERAL || n.X.Val().Kind() != constant.String {
- base.Fatalf("stringtoarraylit %v", n)
- }
-
- var l []ir.Node
- i := 0
- for _, r := range ir.StringVal(n.X) {
- l = append(l, ir.NewKeyExpr(base.Pos, ir.NewInt(int64(i)), ir.NewInt(int64(r))))
- i++
- }
-
- nn := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(n.Type()).(ir.Ntype), nil)
- nn.List.Set(l)
- return typecheck(nn, ctxExpr)
-}
-
-var mapqueue []*ir.MapType
-
-func checkMapKeys() {
- for _, n := range mapqueue {
- k := n.Type().MapType().Key
- if !k.Broke() && !types.IsComparable(k) {
- base.ErrorfAt(n.Pos(), "invalid map key type %v", k)
- }
- }
- mapqueue = nil
-}
-
-func typecheckdeftype(n *ir.Name) {
- if base.EnableTrace && base.Flag.LowerT {
- defer tracePrint("typecheckdeftype", n)(nil)
- }
-
- t := types.NewNamed(n)
- t.Vargen = n.Vargen
- if n.Pragma()&ir.NotInHeap != 0 {
- t.SetNotInHeap(true)
- }
-
- n.SetType(t)
- n.SetTypecheck(1)
- n.SetWalkdef(1)
-
- types.DeferCheckSize()
- errorsBefore := base.Errors()
- n.Ntype = typecheckNtype(n.Ntype)
- if underlying := n.Ntype.Type(); underlying != nil {
- t.SetUnderlying(underlying)
- } else {
- n.SetDiag(true)
- n.SetType(nil)
- }
- if t.Kind() == types.TFORW && base.Errors() > errorsBefore {
- // Something went wrong during type-checking,
- // but it was reported. Silence future errors.
- t.SetBroke(true)
- }
- types.ResumeCheckSize()
-}
-
-func typecheckdef(n ir.Node) {
- if base.EnableTrace && base.Flag.LowerT {
- defer tracePrint("typecheckdef", n)(nil)
- }
-
- lno := ir.SetPos(n)
-
- if n.Op() == ir.ONONAME {
- if !n.Diag() {
- n.SetDiag(true)
-
- // Note: adderrorname looks for this string and
- // adds context about the outer expression
- base.ErrorfAt(base.Pos, "undefined: %v", n.Sym())
- }
- base.Pos = lno
- return
- }
-
- if n.Walkdef() == 1 {
- base.Pos = lno
- return
- }
-
- typecheckdefstack = append(typecheckdefstack, n)
- if n.Walkdef() == 2 {
- base.FlushErrors()
- fmt.Printf("typecheckdef loop:")
- for i := len(typecheckdefstack) - 1; i >= 0; i-- {
- n := typecheckdefstack[i]
- fmt.Printf(" %v", n.Sym())
- }
- fmt.Printf("\n")
- base.Fatalf("typecheckdef loop")
- }
-
- n.SetWalkdef(2)
-
- if n.Type() != nil || n.Sym() == nil { // builtin or no name
- goto ret
- }
-
- switch n.Op() {
- default:
- base.Fatalf("typecheckdef %v", n.Op())
-
- case ir.OLITERAL:
- if n.Name().Ntype != nil {
- n.Name().Ntype = typecheckNtype(n.Name().Ntype)
- n.SetType(n.Name().Ntype.Type())
- n.Name().Ntype = nil
- if n.Type() == nil {
- n.SetDiag(true)
- goto ret
- }
- }
-
- e := n.Name().Defn
- n.Name().Defn = nil
- if e == nil {
- ir.Dump("typecheckdef nil defn", n)
- base.ErrorfAt(n.Pos(), "xxx")
- }
-
- e = typecheck(e, ctxExpr)
- if e.Type() == nil {
- goto ret
- }
- if !ir.IsConstNode(e) {
- if !e.Diag() {
- if e.Op() == ir.ONIL {
- base.ErrorfAt(n.Pos(), "const initializer cannot be nil")
- } else {
- base.ErrorfAt(n.Pos(), "const initializer %v is not a constant", e)
- }
- e.SetDiag(true)
- }
- goto ret
- }
-
- t := n.Type()
- if t != nil {
- if !ir.OKForConst[t.Kind()] {
- base.ErrorfAt(n.Pos(), "invalid constant type %v", t)
- goto ret
- }
-
- if !e.Type().IsUntyped() && !types.Identical(t, e.Type()) {
- base.ErrorfAt(n.Pos(), "cannot use %L as type %v in const initializer", e, t)
- goto ret
- }
-
- e = convlit(e, t)
- }
-
- n.SetType(e.Type())
- if n.Type() != nil {
- n.SetVal(e.Val())
- }
-
- case ir.ONAME:
- n := n.(*ir.Name)
- if n.Name().Ntype != nil {
- n.Name().Ntype = typecheckNtype(n.Name().Ntype)
- n.SetType(n.Name().Ntype.Type())
- if n.Type() == nil {
- n.SetDiag(true)
- goto ret
- }
- }
-
- if n.Type() != nil {
- break
- }
- if n.Name().Defn == nil {
- if n.BuiltinOp != 0 { // like OPRINTN
- break
- }
- if base.Errors() > 0 {
- // Can have undefined variables in x := foo
- // that make x have an n.name.Defn == nil.
- // If there are other errors anyway, don't
- // bother adding to the noise.
- break
- }
-
- base.Fatalf("var without type, init: %v", n.Sym())
- }
-
- if n.Name().Defn.Op() == ir.ONAME {
- n.Name().Defn = typecheck(n.Name().Defn, ctxExpr)
- n.SetType(n.Name().Defn.Type())
- break
- }
-
- n.Name().Defn = typecheck(n.Name().Defn, ctxStmt) // fills in n.Type
-
- case ir.OTYPE:
- n := n.(*ir.Name)
- if n.Alias() {
- // Type alias declaration: Simply use the rhs type - no need
- // to create a new type.
- // If we have a syntax error, name.Ntype may be nil.
- if n.Ntype != nil {
- n.Ntype = typecheckNtype(n.Ntype)
- n.SetType(n.Ntype.Type())
- if n.Type() == nil {
- n.SetDiag(true)
- goto ret
- }
- // For package-level type aliases, set n.Sym.Def so we can identify
- // it as a type alias during export. See also #31959.
- if n.Curfn == nil {
- n.Sym().Def = n.Ntype
- }
- }
- break
- }
-
- // regular type declaration
- typecheckdeftype(n)
- }
-
-ret:
- if n.Op() != ir.OLITERAL && n.Type() != nil && n.Type().IsUntyped() {
- base.Fatalf("got %v for %v", n.Type(), n)
- }
- last := len(typecheckdefstack) - 1
- if typecheckdefstack[last] != n {
- base.Fatalf("typecheckdefstack mismatch")
- }
- typecheckdefstack[last] = nil
- typecheckdefstack = typecheckdefstack[:last]
-
- base.Pos = lno
- n.SetWalkdef(1)
-}
-
-func checkmake(t *types.Type, arg string, np *ir.Node) bool {
- n := *np
- if !n.Type().IsInteger() && n.Type().Kind() != types.TIDEAL {
- base.Errorf("non-integer %s argument in make(%v) - %v", arg, t, n.Type())
- return false
- }
-
- // Do range checks for constants before defaultlit
- // to avoid redundant "constant NNN overflows int" errors.
- if n.Op() == ir.OLITERAL {
- v := toint(n.Val())
- if constant.Sign(v) < 0 {
- base.Errorf("negative %s argument in make(%v)", arg, t)
- return false
- }
- if ir.ConstOverflow(v, types.Types[types.TINT]) {
- base.Errorf("%s argument too large in make(%v)", arg, t)
- return false
- }
- }
-
- // defaultlit is necessary for non-constants too: n might be 1.1<<k.
- // TODO(gri) The length argument requirements for (array/slice) make
- // are the same as for index expressions. Factor the code better;
- // for instance, indexlit might be called here and incorporate some
- // of the bounds checks done for make.
- n = defaultlit(n, types.Types[types.TINT])
- *np = n
-
- return true
-}
-
-// markBreak marks control statements containing break statements with SetHasBreak(true).
-func markBreak(fn *ir.Func) {
- var labels map[*types.Sym]ir.Node
- var implicit ir.Node
-
- var mark func(ir.Node) error
- mark = func(n ir.Node) error {
- switch n.Op() {
- default:
- ir.DoChildren(n, mark)
-
- case ir.OBREAK:
- n := n.(*ir.BranchStmt)
- if n.Label == nil {
- setHasBreak(implicit)
- } else {
- setHasBreak(labels[n.Label])
- }
-
- case ir.OFOR, ir.OFORUNTIL, ir.OSWITCH, ir.OSELECT, ir.ORANGE:
- old := implicit
- implicit = n
- var sym *types.Sym
- switch n := n.(type) {
- case *ir.ForStmt:
- sym = n.Label
- case *ir.RangeStmt:
- sym = n.Label
- case *ir.SelectStmt:
- sym = n.Label
- case *ir.SwitchStmt:
- sym = n.Label
- }
- if sym != nil {
- if labels == nil {
- // Map creation delayed until we need it - most functions don't.
- labels = make(map[*types.Sym]ir.Node)
- }
- labels[sym] = n
- }
- ir.DoChildren(n, mark)
- if sym != nil {
- delete(labels, sym)
- }
- implicit = old
- }
- return nil
- }
-
- mark(fn)
-}
-
-func controlLabel(n ir.Node) *types.Sym {
- switch n := n.(type) {
- default:
- base.Fatalf("controlLabel %+v", n.Op())
- return nil
- case *ir.ForStmt:
- return n.Label
- case *ir.RangeStmt:
- return n.Label
- case *ir.SelectStmt:
- return n.Label
- case *ir.SwitchStmt:
- return n.Label
- }
-}
-
-func setHasBreak(n ir.Node) {
- switch n := n.(type) {
- default:
- base.Fatalf("setHasBreak %+v", n.Op())
- case nil:
- // ignore
- case *ir.ForStmt:
- n.HasBreak = true
- case *ir.RangeStmt:
- n.HasBreak = true
- case *ir.SelectStmt:
- n.HasBreak = true
- case *ir.SwitchStmt:
- n.HasBreak = true
- }
-}
-
-// isTermNodes reports whether the Nodes list ends with a terminating statement.
-func isTermNodes(l ir.Nodes) bool {
- s := l
- c := len(s)
- if c == 0 {
- return false
- }
- return isTermNode(s[c-1])
-}
-
-// isTermNode reports whether the node n, the last one in a
-// statement list, is a terminating statement.
-func isTermNode(n ir.Node) bool {
- switch n.Op() {
- // NOTE: OLABEL is treated as a separate statement,
- // not a separate prefix, so skipping to the last statement
- // in the block handles the labeled statement case by
- // skipping over the label. No case OLABEL here.
-
- case ir.OBLOCK:
- n := n.(*ir.BlockStmt)
- return isTermNodes(n.List)
-
- case ir.OGOTO, ir.ORETURN, ir.ORETJMP, ir.OPANIC, ir.OFALL:
- return true
-
- case ir.OFOR, ir.OFORUNTIL:
- n := n.(*ir.ForStmt)
- if n.Cond != nil {
- return false
- }
- if n.HasBreak {
- return false
- }
- return true
-
- case ir.OIF:
- n := n.(*ir.IfStmt)
- return isTermNodes(n.Body) && isTermNodes(n.Else)
-
- case ir.OSWITCH:
- n := n.(*ir.SwitchStmt)
- if n.HasBreak {
- return false
- }
- def := false
- for _, cas := range n.Cases {
- cas := cas.(*ir.CaseStmt)
- if !isTermNodes(cas.Body) {
- return false
- }
- if len(cas.List) == 0 { // default
- def = true
- }
- }
- return def
-
- case ir.OSELECT:
- n := n.(*ir.SelectStmt)
- if n.HasBreak {
- return false
- }
- for _, cas := range n.Cases {
- cas := cas.(*ir.CaseStmt)
- if !isTermNodes(cas.Body) {
- return false
- }
- }
- return true
- }
-
- return false
-}
-
-// checkreturn makes sure that fn terminates appropriately.
-func checkreturn(fn *ir.Func) {
- if fn.Type().NumResults() != 0 && len(fn.Body) != 0 {
- markBreak(fn)
- if !isTermNodes(fn.Body) {
- base.ErrorfAt(fn.Endlineno, "missing return at end of function")
- }
- }
-}
-
-func deadcode(fn *ir.Func) {
- deadcodeslice(&fn.Body)
-
- if len(fn.Body) == 0 {
- return
- }
-
- for _, n := range fn.Body {
- if len(n.Init()) > 0 {
- return
- }
- switch n.Op() {
- case ir.OIF:
- n := n.(*ir.IfStmt)
- if !ir.IsConst(n.Cond, constant.Bool) || len(n.Body) > 0 || len(n.Else) > 0 {
- return
- }
- case ir.OFOR:
- n := n.(*ir.ForStmt)
- if !ir.IsConst(n.Cond, constant.Bool) || ir.BoolVal(n.Cond) {
- return
- }
- default:
- return
- }
- }
-
- fn.Body.Set([]ir.Node{ir.NewBlockStmt(base.Pos, nil)})
-}
-
-func deadcodeslice(nn *ir.Nodes) {
- var lastLabel = -1
- for i, n := range *nn {
- if n != nil && n.Op() == ir.OLABEL {
- lastLabel = i
- }
- }
- for i, n := range *nn {
- // Cut is set to true when all nodes after the i'th position
- // should be removed. In other words, it marks the whole
- // slice "tail" as dead.
- cut := false
- if n == nil {
- continue
- }
- if n.Op() == ir.OIF {
- n := n.(*ir.IfStmt)
- n.Cond = deadcodeexpr(n.Cond)
- if ir.IsConst(n.Cond, constant.Bool) {
- var body ir.Nodes
- if ir.BoolVal(n.Cond) {
- n.Else = ir.Nodes{}
- body = n.Body
- } else {
- n.Body = ir.Nodes{}
- body = n.Else
- }
- // If "then" or "else" branch ends with panic or return statement,
- // it is safe to remove all statements after this node.
- // isterminating is not used to avoid goto-related complications.
- // We must be careful not to deadcode-remove labels, as they
- // might be the target of a goto. See issue 28616.
- if body := body; len(body) != 0 {
- switch body[(len(body) - 1)].Op() {
- case ir.ORETURN, ir.ORETJMP, ir.OPANIC:
- if i > lastLabel {
- cut = true
- }
- }
- }
- }
- }
-
- deadcodeslice(n.PtrInit())
- switch n.Op() {
- case ir.OBLOCK:
- n := n.(*ir.BlockStmt)
- deadcodeslice(&n.List)
- case ir.OCASE:
- n := n.(*ir.CaseStmt)
- deadcodeslice(&n.Body)
- case ir.OFOR:
- n := n.(*ir.ForStmt)
- deadcodeslice(&n.Body)
- case ir.OIF:
- n := n.(*ir.IfStmt)
- deadcodeslice(&n.Body)
- deadcodeslice(&n.Else)
- case ir.ORANGE:
- n := n.(*ir.RangeStmt)
- deadcodeslice(&n.Body)
- case ir.OSELECT:
- n := n.(*ir.SelectStmt)
- deadcodeslice(&n.Cases)
- case ir.OSWITCH:
- n := n.(*ir.SwitchStmt)
- deadcodeslice(&n.Cases)
- }
-
- if cut {
- nn.Set((*nn)[:i+1])
- break
- }
- }
-}
-
-func deadcodeexpr(n ir.Node) ir.Node {
- // Perform dead-code elimination on short-circuited boolean
- // expressions involving constants with the intent of
- // producing a constant 'if' condition.
- switch n.Op() {
- case ir.OANDAND:
- n := n.(*ir.LogicalExpr)
- n.X = deadcodeexpr(n.X)
- n.Y = deadcodeexpr(n.Y)
- if ir.IsConst(n.X, constant.Bool) {
- if ir.BoolVal(n.X) {
- return n.Y // true && x => x
- } else {
- return n.X // false && x => false
- }
- }
- case ir.OOROR:
- n := n.(*ir.LogicalExpr)
- n.X = deadcodeexpr(n.X)
- n.Y = deadcodeexpr(n.Y)
- if ir.IsConst(n.X, constant.Bool) {
- if ir.BoolVal(n.X) {
- return n.X // true || x => true
- } else {
- return n.Y // false || x => x
- }
- }
- }
- return n
-}
-
-// getIotaValue returns the current value for "iota",
-// or -1 if not within a ConstSpec.
-func getIotaValue() int64 {
- if i := len(typecheckdefstack); i > 0 {
- if x := typecheckdefstack[i-1]; x.Op() == ir.OLITERAL {
- return x.(*ir.Name).Iota()
- }
- }
-
- if ir.CurFunc != nil && ir.CurFunc.Iota >= 0 {
- return ir.CurFunc.Iota
- }
-
- return -1
-}
-
-// curpkg returns the current package, based on Curfn.
-func curpkg() *types.Pkg {
- fn := ir.CurFunc
- if fn == nil {
- // Initialization expressions for package-scope variables.
- return types.LocalPkg
- }
- return fnpkg(fn.Nname)
-}
diff --git a/src/cmd/compile/internal/gc/types.go b/src/cmd/compile/internal/gc/types.go
deleted file mode 100644
index e46735df28..0000000000
--- a/src/cmd/compile/internal/gc/types.go
+++ /dev/null
@@ -1,5 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package gc
diff --git a/src/cmd/compile/internal/gc/types_acc.go b/src/cmd/compile/internal/gc/types_acc.go
deleted file mode 100644
index d6d53f05cc..0000000000
--- a/src/cmd/compile/internal/gc/types_acc.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file implements conversions between *types.Node and *Node.
-// TODO(gri) try to eliminate these soon
-
-package gc
diff --git a/src/cmd/compile/internal/gc/universe.go b/src/cmd/compile/internal/gc/universe.go
deleted file mode 100644
index 5d59fdbbc5..0000000000
--- a/src/cmd/compile/internal/gc/universe.go
+++ /dev/null
@@ -1,347 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// TODO(gri) This file should probably become part of package types.
-
-package gc
-
-import (
- "cmd/compile/internal/base"
- "cmd/compile/internal/ir"
- "cmd/compile/internal/types"
- "cmd/internal/src"
- "go/constant"
-)
-
-var basicTypes = [...]struct {
- name string
- etype types.Kind
-}{
- {"int8", types.TINT8},
- {"int16", types.TINT16},
- {"int32", types.TINT32},
- {"int64", types.TINT64},
- {"uint8", types.TUINT8},
- {"uint16", types.TUINT16},
- {"uint32", types.TUINT32},
- {"uint64", types.TUINT64},
- {"float32", types.TFLOAT32},
- {"float64", types.TFLOAT64},
- {"complex64", types.TCOMPLEX64},
- {"complex128", types.TCOMPLEX128},
- {"bool", types.TBOOL},
- {"string", types.TSTRING},
-}
-
-var typedefs = [...]struct {
- name string
- etype types.Kind
- sameas32 types.Kind
- sameas64 types.Kind
-}{
- {"int", types.TINT, types.TINT32, types.TINT64},
- {"uint", types.TUINT, types.TUINT32, types.TUINT64},
- {"uintptr", types.TUINTPTR, types.TUINT32, types.TUINT64},
-}
-
-var builtinFuncs = [...]struct {
- name string
- op ir.Op
-}{
- {"append", ir.OAPPEND},
- {"cap", ir.OCAP},
- {"close", ir.OCLOSE},
- {"complex", ir.OCOMPLEX},
- {"copy", ir.OCOPY},
- {"delete", ir.ODELETE},
- {"imag", ir.OIMAG},
- {"len", ir.OLEN},
- {"make", ir.OMAKE},
- {"new", ir.ONEW},
- {"panic", ir.OPANIC},
- {"print", ir.OPRINT},
- {"println", ir.OPRINTN},
- {"real", ir.OREAL},
- {"recover", ir.ORECOVER},
-}
-
-var unsafeFuncs = [...]struct {
- name string
- op ir.Op
-}{
- {"Alignof", ir.OALIGNOF},
- {"Offsetof", ir.OOFFSETOF},
- {"Sizeof", ir.OSIZEOF},
-}
-
-// initUniverse initializes the universe block.
-func initUniverse() {
- if types.PtrSize == 0 {
- base.Fatalf("typeinit before betypeinit")
- }
-
- types.SlicePtrOffset = 0
- types.SliceLenOffset = types.Rnd(types.SlicePtrOffset+int64(types.PtrSize), int64(types.PtrSize))
- types.SliceCapOffset = types.Rnd(types.SliceLenOffset+int64(types.PtrSize), int64(types.PtrSize))
- types.SliceSize = types.Rnd(types.SliceCapOffset+int64(types.PtrSize), int64(types.PtrSize))
-
- // string is same as slice w/o the cap
- types.StringSize = types.Rnd(types.SliceLenOffset+int64(types.PtrSize), int64(types.PtrSize))
-
- for et := types.Kind(0); et < types.NTYPE; et++ {
- types.SimType[et] = et
- }
-
- types.Types[types.TANY] = types.New(types.TANY)
- types.Types[types.TINTER] = types.NewInterface(types.LocalPkg, nil)
-
- defBasic := func(kind types.Kind, pkg *types.Pkg, name string) *types.Type {
- sym := pkg.Lookup(name)
- n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, sym)
- t := types.NewBasic(kind, n)
- n.SetType(t)
- sym.Def = n
- if kind != types.TANY {
- types.CalcSize(t)
- }
- return t
- }
-
- for _, s := range &basicTypes {
- types.Types[s.etype] = defBasic(s.etype, types.BuiltinPkg, s.name)
- }
-
- for _, s := range &typedefs {
- sameas := s.sameas32
- if types.PtrSize == 8 {
- sameas = s.sameas64
- }
- types.SimType[s.etype] = sameas
-
- types.Types[s.etype] = defBasic(s.etype, types.BuiltinPkg, s.name)
- }
-
- // We create separate byte and rune types for better error messages
- // rather than just creating type alias *types.Sym's for the uint8 and
- // int32 types. Hence, (bytetype|runetype).Sym.isAlias() is false.
- // TODO(gri) Should we get rid of this special case (at the cost
- // of less informative error messages involving bytes and runes)?
- // (Alternatively, we could introduce an OTALIAS node representing
- // type aliases, albeit at the cost of having to deal with it everywhere).
- types.ByteType = defBasic(types.TUINT8, types.BuiltinPkg, "byte")
- types.RuneType = defBasic(types.TINT32, types.BuiltinPkg, "rune")
-
- // error type
- s := types.BuiltinPkg.Lookup("error")
- n := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, s)
- types.ErrorType = types.NewNamed(n)
- types.ErrorType.SetUnderlying(makeErrorInterface())
- n.SetType(types.ErrorType)
- s.Def = n
- types.CalcSize(types.ErrorType)
-
- types.Types[types.TUNSAFEPTR] = defBasic(types.TUNSAFEPTR, ir.Pkgs.Unsafe, "Pointer")
-
- // simple aliases
- types.SimType[types.TMAP] = types.TPTR
- types.SimType[types.TCHAN] = types.TPTR
- types.SimType[types.TFUNC] = types.TPTR
- types.SimType[types.TUNSAFEPTR] = types.TPTR
-
- for _, s := range &builtinFuncs {
- s2 := types.BuiltinPkg.Lookup(s.name)
- def := NewName(s2)
- def.BuiltinOp = s.op
- s2.Def = def
- }
-
- for _, s := range &unsafeFuncs {
- s2 := ir.Pkgs.Unsafe.Lookup(s.name)
- def := NewName(s2)
- def.BuiltinOp = s.op
- s2.Def = def
- }
-
- s = types.BuiltinPkg.Lookup("true")
- s.Def = ir.NewConstAt(src.NoXPos, s, types.UntypedBool, constant.MakeBool(true))
-
- s = types.BuiltinPkg.Lookup("false")
- s.Def = ir.NewConstAt(src.NoXPos, s, types.UntypedBool, constant.MakeBool(false))
-
- s = lookup("_")
- types.BlankSym = s
- s.Block = -100
- s.Def = NewName(s)
- types.Types[types.TBLANK] = types.New(types.TBLANK)
- ir.AsNode(s.Def).SetType(types.Types[types.TBLANK])
- ir.BlankNode = ir.AsNode(s.Def)
- ir.BlankNode.SetTypecheck(1)
-
- s = types.BuiltinPkg.Lookup("_")
- s.Block = -100
- s.Def = NewName(s)
- types.Types[types.TBLANK] = types.New(types.TBLANK)
- ir.AsNode(s.Def).SetType(types.Types[types.TBLANK])
-
- types.Types[types.TNIL] = types.New(types.TNIL)
- s = types.BuiltinPkg.Lookup("nil")
- nnil := nodnil()
- nnil.(*ir.NilExpr).SetSym(s)
- s.Def = nnil
-
- s = types.BuiltinPkg.Lookup("iota")
- s.Def = ir.NewIota(base.Pos, s)
-
- for et := types.TINT8; et <= types.TUINT64; et++ {
- types.IsInt[et] = true
- }
- types.IsInt[types.TINT] = true
- types.IsInt[types.TUINT] = true
- types.IsInt[types.TUINTPTR] = true
-
- types.IsFloat[types.TFLOAT32] = true
- types.IsFloat[types.TFLOAT64] = true
-
- types.IsComplex[types.TCOMPLEX64] = true
- types.IsComplex[types.TCOMPLEX128] = true
-
- // initialize okfor
- for et := types.Kind(0); et < types.NTYPE; et++ {
- if types.IsInt[et] || et == types.TIDEAL {
- okforeq[et] = true
- types.IsOrdered[et] = true
- okforarith[et] = true
- okforadd[et] = true
- okforand[et] = true
- ir.OKForConst[et] = true
- types.IsSimple[et] = true
- }
-
- if types.IsFloat[et] {
- okforeq[et] = true
- types.IsOrdered[et] = true
- okforadd[et] = true
- okforarith[et] = true
- ir.OKForConst[et] = true
- types.IsSimple[et] = true
- }
-
- if types.IsComplex[et] {
- okforeq[et] = true
- okforadd[et] = true
- okforarith[et] = true
- ir.OKForConst[et] = true
- types.IsSimple[et] = true
- }
- }
-
- types.IsSimple[types.TBOOL] = true
-
- okforadd[types.TSTRING] = true
-
- okforbool[types.TBOOL] = true
-
- okforcap[types.TARRAY] = true
- okforcap[types.TCHAN] = true
- okforcap[types.TSLICE] = true
-
- ir.OKForConst[types.TBOOL] = true
- ir.OKForConst[types.TSTRING] = true
-
- okforlen[types.TARRAY] = true
- okforlen[types.TCHAN] = true
- okforlen[types.TMAP] = true
- okforlen[types.TSLICE] = true
- okforlen[types.TSTRING] = true
-
- okforeq[types.TPTR] = true
- okforeq[types.TUNSAFEPTR] = true
- okforeq[types.TINTER] = true
- okforeq[types.TCHAN] = true
- okforeq[types.TSTRING] = true
- okforeq[types.TBOOL] = true
- okforeq[types.TMAP] = true // nil only; refined in typecheck
- okforeq[types.TFUNC] = true // nil only; refined in typecheck
- okforeq[types.TSLICE] = true // nil only; refined in typecheck
- okforeq[types.TARRAY] = true // only if element type is comparable; refined in typecheck
- okforeq[types.TSTRUCT] = true // only if all struct fields are comparable; refined in typecheck
-
- types.IsOrdered[types.TSTRING] = true
-
- for i := range okfor {
- okfor[i] = okfornone[:]
- }
-
- // binary
- okfor[ir.OADD] = okforadd[:]
- okfor[ir.OAND] = okforand[:]
- okfor[ir.OANDAND] = okforbool[:]
- okfor[ir.OANDNOT] = okforand[:]
- okfor[ir.ODIV] = okforarith[:]
- okfor[ir.OEQ] = okforeq[:]
- okfor[ir.OGE] = types.IsOrdered[:]
- okfor[ir.OGT] = types.IsOrdered[:]
- okfor[ir.OLE] = types.IsOrdered[:]
- okfor[ir.OLT] = types.IsOrdered[:]
- okfor[ir.OMOD] = okforand[:]
- okfor[ir.OMUL] = okforarith[:]
- okfor[ir.ONE] = okforeq[:]
- okfor[ir.OOR] = okforand[:]
- okfor[ir.OOROR] = okforbool[:]
- okfor[ir.OSUB] = okforarith[:]
- okfor[ir.OXOR] = okforand[:]
- okfor[ir.OLSH] = okforand[:]
- okfor[ir.ORSH] = okforand[:]
-
- // unary
- okfor[ir.OBITNOT] = okforand[:]
- okfor[ir.ONEG] = okforarith[:]
- okfor[ir.ONOT] = okforbool[:]
- okfor[ir.OPLUS] = okforarith[:]
-
- // special
- okfor[ir.OCAP] = okforcap[:]
- okfor[ir.OLEN] = okforlen[:]
-
- // comparison
- iscmp[ir.OLT] = true
- iscmp[ir.OGT] = true
- iscmp[ir.OGE] = true
- iscmp[ir.OLE] = true
- iscmp[ir.OEQ] = true
- iscmp[ir.ONE] = true
-}
-
-func makeErrorInterface() *types.Type {
- sig := types.NewSignature(types.NoPkg, fakeRecvField(), nil, []*types.Field{
- types.NewField(src.NoXPos, nil, types.Types[types.TSTRING]),
- })
- method := types.NewField(src.NoXPos, lookup("Error"), sig)
- return types.NewInterface(types.NoPkg, []*types.Field{method})
-}
-
-// finishUniverse makes the universe block visible within the current package.
-func finishUniverse() {
- // Operationally, this is similar to a dot import of builtinpkg, except
- // that we silently skip symbols that are already declared in the
- // package block rather than emitting a redeclared symbol error.
-
- for _, s := range types.BuiltinPkg.Syms {
- if s.Def == nil {
- continue
- }
- s1 := lookup(s.Name)
- if s1.Def != nil {
- continue
- }
-
- s1.Def = s.Def
- s1.Block = s.Block
- }
-
- ir.RegFP = NewName(lookup(".fp"))
- ir.RegFP.SetType(types.Types[types.TINT32])
- ir.RegFP.Class_ = ir.PPARAM
- ir.RegFP.SetUsed(true)
-}
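
Editorial note (not part of the commit): initUniverse above fills per-operator permission tables (okforadd, okforbool, and friends, gathered into okfor) so that later checks reduce to a table lookup. A minimal sketch of that table-driven scheme, using made-up kind and operator names rather than the real types.Kind and ir.Op values:

package main

import "fmt"

// kind and op stand in for types.Kind and ir.Op; every name in this file
// is illustrative only.
type kind int
type op int

const (
	kInt kind = iota
	kFloat
	kString
	kBool
	nKind
)

const (
	opAdd op = iota
	opNot
	nOp
)

// okfor maps each operator to the set of kinds it accepts.
var okfor [nOp][]bool

func main() {
	okforadd := make([]bool, nKind)
	okforbool := make([]bool, nKind)
	okforadd[kInt], okforadd[kFloat], okforadd[kString] = true, true, true
	okforbool[kBool] = true

	okfor[opAdd] = okforadd
	okfor[opNot] = okforbool

	// An operand of kind k is legal for operator o exactly when okfor[o][k].
	fmt.Println(okfor[opAdd][kString], okfor[opNot][kInt]) // true false
}
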
diff --git a/src/cmd/compile/internal/gc/unsafe.go b/src/cmd/compile/internal/gc/unsafe.go
deleted file mode 100644
index d37ebfff31..0000000000
--- a/src/cmd/compile/internal/gc/unsafe.go
+++ /dev/null
@@ -1,90 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package gc
-
-import (
- "cmd/compile/internal/base"
- "cmd/compile/internal/ir"
- "cmd/compile/internal/types"
-)
-
-// evalunsafe evaluates a package unsafe operation and returns the result.
-func evalunsafe(n ir.Node) int64 {
- switch n.Op() {
- case ir.OALIGNOF, ir.OSIZEOF:
- n := n.(*ir.UnaryExpr)
- n.X = typecheck(n.X, ctxExpr)
- n.X = defaultlit(n.X, nil)
- tr := n.X.Type()
- if tr == nil {
- return 0
- }
- types.CalcSize(tr)
- if n.Op() == ir.OALIGNOF {
- return int64(tr.Align)
- }
- return tr.Width
-
- case ir.OOFFSETOF:
- // must be a selector.
- n := n.(*ir.UnaryExpr)
- if n.X.Op() != ir.OXDOT {
- base.Errorf("invalid expression %v", n)
- return 0
- }
- sel := n.X.(*ir.SelectorExpr)
-
- // Remember base of selector to find it back after dot insertion.
- // Since r->left may be mutated by typechecking, check it explicitly
- // first to track it correctly.
- sel.X = typecheck(sel.X, ctxExpr)
- sbase := sel.X
-
- tsel := typecheck(sel, ctxExpr)
- n.X = tsel
- if tsel.Type() == nil {
- return 0
- }
- switch tsel.Op() {
- case ir.ODOT, ir.ODOTPTR:
- break
- case ir.OCALLPART:
- base.Errorf("invalid expression %v: argument is a method value", n)
- return 0
- default:
- base.Errorf("invalid expression %v", n)
- return 0
- }
-
- // Sum offsets for dots until we reach sbase.
- var v int64
- var next ir.Node
- for r := tsel; r != sbase; r = next {
- switch r.Op() {
- case ir.ODOTPTR:
- // For Offsetof(s.f), s may itself be a pointer,
- // but accessing f must not otherwise involve
- // indirection via embedded pointer types.
- r := r.(*ir.SelectorExpr)
- if r.X != sbase {
- base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.X)
- return 0
- }
- fallthrough
- case ir.ODOT:
- r := r.(*ir.SelectorExpr)
- v += r.Offset
- next = r.X
- default:
- ir.Dump("unsafenmagic", tsel)
- base.Fatalf("impossible %v node after dot insertion", r.Op())
- }
- }
- return v
- }
-
- base.Fatalf("unexpected op %v", n.Op())
- return 0
-}
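
Editorial note (not part of the commit): the walk.go hunks below show the mechanical shape of the move: package-local helpers such as typecheck(n, ctxExpr), temp, nodAddr, and nodnil become calls into the exported typecheck API. A hypothetical caller written against the tree at this commit would look roughly like the sketch below; exampleCaller and its body are illustrative only and mirror the ONEW rewrite visible in the diff.

package gc

import (
	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
)

// exampleCaller illustrates the post-move API: typecheck(n, ctxExpr) becomes
// typecheck.Expr(n), typecheck(n, ctxStmt) becomes typecheck.Stmt(n),
// temp(t) becomes typecheck.Temp(t), and nodAddr(x) becomes typecheck.NodAddr(x).
func exampleCaller(init *ir.Nodes, t *types.Type) ir.Node {
	tmp := typecheck.Temp(t)                                          // was temp(t)
	init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, tmp, nil))) // was typecheck(..., ctxStmt)
	return typecheck.Expr(typecheck.NodAddr(tmp))                     // was typecheck(nodAddr(tmp), ctxExpr)
}
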
diff --git a/src/cmd/compile/internal/gc/walk.go b/src/cmd/compile/internal/gc/walk.go
index 764c5c41b0..73f82f333c 100644
--- a/src/cmd/compile/internal/gc/walk.go
+++ b/src/cmd/compile/internal/gc/walk.go
@@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/objabi"
@@ -42,7 +43,7 @@ func walk(fn *ir.Func) {
// Final typecheck for any unused variables.
for i, ln := range fn.Dcl {
if ln.Op() == ir.ONAME && (ln.Class_ == ir.PAUTO || ln.Class_ == ir.PAUTOHEAP) {
- ln = typecheck(ln, ctxExpr|ctxAssign).(*ir.Name)
+ ln = typecheck.AssignExpr(ln).(*ir.Name)
fn.Dcl[i] = ln
}
}
@@ -191,7 +192,7 @@ func walkstmt(n ir.Node) ir.Node {
n.PtrInit().Set(nil)
n.X = walkexpr(n.X, &init)
- call := walkexpr(mkcall1(chanfn("chanrecv1", 2, n.X.Type()), nil, &init, n.X, nodnil()), &init)
+ call := walkexpr(mkcall1(chanfn("chanrecv1", 2, n.X.Type()), nil, &init, n.X, typecheck.NodNil()), &init)
return ir.InitExpr(init, call)
case ir.OBREAK,
@@ -216,7 +217,7 @@ func walkstmt(n ir.Node) ir.Node {
}
nn := ir.NewAssignStmt(base.Pos, v.Name().Heapaddr, callnew(v.Type()))
nn.Def = true
- return walkstmt(typecheck(nn, ctxStmt))
+ return walkstmt(typecheck.Stmt(nn))
}
return n
@@ -325,7 +326,7 @@ func walkstmt(n ir.Node) ir.Node {
if cl == ir.PPARAMOUT {
var ln ir.Node = ln
if ir.IsParamStackCopy(ln) {
- ln = walkexpr(typecheck(ir.NewStarExpr(base.Pos, ln.Name().Heapaddr), ctxExpr), nil)
+ ln = walkexpr(typecheck.Expr(ir.NewStarExpr(base.Pos, ln.Name().Heapaddr)), nil)
}
rl = append(rl, ln)
}
@@ -504,7 +505,7 @@ func walkexpr(n ir.Node, init *ir.Nodes) ir.Node {
n := n.(*ir.Name)
nn := ir.NewStarExpr(base.Pos, n.Name().Heapaddr)
nn.X.MarkNonNil()
- return walkexpr(typecheck(nn, ctxExpr), init)
+ return walkexpr(typecheck.Expr(nn), init)
}
n = walkexpr1(n, init)
@@ -515,12 +516,12 @@ func walkexpr(n ir.Node, init *ir.Nodes) ir.Node {
// walk of y%1 may have replaced it by 0.
// Check whether n with its updated args is itself now a constant.
t := n.Type()
- n = evalConst(n)
+ n = typecheck.EvalConst(n)
if n.Type() != t {
base.Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type())
}
if n.Op() == ir.OLITERAL {
- n = typecheck(n, ctxExpr)
+ n = typecheck.Expr(n)
// Emit string symbol now to avoid emitting
// any concurrently during the backend.
if v := n.Val(); v.Kind() == constant.String {
@@ -604,7 +605,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
n := n.(*ir.UnaryExpr)
if isRuneCount(n) {
// Replace len([]rune(string)) with runtime.countrunes(string).
- return mkcall("countrunes", n.Type(), init, conv(n.X.(*ir.ConvExpr).X, types.Types[types.TSTRING]))
+ return mkcall("countrunes", n.Type(), init, typecheck.Conv(n.X.(*ir.ConvExpr).X, types.Types[types.TSTRING]))
}
n.X = walkexpr(n.X, init)
@@ -618,7 +619,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
}
if t.IsArray() {
safeexpr(n.X, init)
- con := origIntConst(n, t.NumElem())
+ con := typecheck.OrigInt(n, t.NumElem())
con.SetTypecheck(1)
return con
}
@@ -656,7 +657,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.ORECOVER:
n := n.(*ir.CallExpr)
- return mkcall("gorecover", n.Type(), init, nodAddr(ir.RegFP))
+ return mkcall("gorecover", n.Type(), init, typecheck.NodAddr(ir.RegFP))
case ir.OCLOSUREREAD, ir.OCFUNC:
return n
@@ -724,7 +725,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
if n.Op() == ir.OASOP {
// Rewrite x op= y into x = x op y.
- n = ir.NewAssignStmt(base.Pos, left, typecheck(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).AsOp, left, right), ctxExpr))
+ n = ir.NewAssignStmt(base.Pos, left, typecheck.Expr(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).AsOp, left, right)))
} else {
n.(*ir.AssignStmt).X = left
}
@@ -753,7 +754,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
recv := as.Y.(*ir.UnaryExpr)
recv.X = walkexpr(recv.X, init)
- n1 := nodAddr(as.X)
+ n1 := typecheck.NodAddr(as.X)
r := recv.X // the channel
return mkcall1(chanfn("chanrecv1", 2, r.Type()), nil, init, r, n1)
@@ -826,14 +827,14 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
r.X = walkexpr(r.X, init)
var n1 ir.Node
if ir.IsBlank(n.Lhs[0]) {
- n1 = nodnil()
+ n1 = typecheck.NodNil()
} else {
- n1 = nodAddr(n.Lhs[0])
+ n1 = typecheck.NodAddr(n.Lhs[0])
}
fn := chanfn("chanrecv2", 2, r.X.Type())
ok := n.Lhs[1]
call := mkcall1(fn, types.Types[types.TBOOL], init, r.X, n1)
- return typecheck(ir.NewAssignStmt(base.Pos, ok, call), ctxStmt)
+ return typecheck.Stmt(ir.NewAssignStmt(base.Pos, ok, call))
// a,b = m[i]
case ir.OAS2MAPR:
@@ -854,7 +855,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
} else {
// standard version takes key by reference
// order.expr made sure key is addressable.
- key = nodAddr(r.Index)
+ key = typecheck.NodAddr(r.Index)
}
// from:
@@ -885,10 +886,10 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// don't generate a = *var if a is _
if ir.IsBlank(a) {
- return walkexpr(typecheck(n, ctxStmt), init)
+ return walkexpr(typecheck.Stmt(n), init)
}
- var_ := temp(types.NewPtr(t.Elem()))
+ var_ := typecheck.Temp(types.NewPtr(t.Elem()))
var_.SetTypecheck(1)
var_.MarkNonNil() // mapaccess always returns a non-nil pointer
@@ -896,7 +897,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
init.Append(walkexpr(n, init))
as := ir.NewAssignStmt(base.Pos, a, ir.NewStarExpr(base.Pos, var_))
- return walkexpr(typecheck(as, ctxStmt), init)
+ return walkexpr(typecheck.Stmt(as), init)
case ir.ODELETE:
n := n.(*ir.CallExpr)
@@ -910,7 +911,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
fast := mapfast(t)
if fast == mapslow {
// order.stmt made sure key is addressable.
- key = nodAddr(key)
+ key = typecheck.NodAddr(key)
}
return mkcall1(mapfndel(mapdelete[fast], t), nil, init, typename(t), map_, key)
@@ -948,12 +949,12 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
}
if ir.Names.Staticuint64s == nil {
- ir.Names.Staticuint64s = NewName(ir.Pkgs.Runtime.Lookup("staticuint64s"))
+ ir.Names.Staticuint64s = typecheck.NewName(ir.Pkgs.Runtime.Lookup("staticuint64s"))
ir.Names.Staticuint64s.Class_ = ir.PEXTERN
// The actual type is [256]uint64, but we use [256*8]uint8 so we can address
// individual bytes.
ir.Names.Staticuint64s.SetType(types.NewArray(types.Types[types.TUINT8], 256*8))
- ir.Names.Zerobase = NewName(ir.Pkgs.Runtime.Lookup("zerobase"))
+ ir.Names.Zerobase = typecheck.NewName(ir.Pkgs.Runtime.Lookup("zerobase"))
ir.Names.Zerobase.Class_ = ir.PEXTERN
ir.Names.Zerobase.SetType(types.Types[types.TUINTPTR])
}
@@ -984,14 +985,14 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
value = n.X
case !fromType.IsInterface() && n.Esc() == ir.EscNone && fromType.Width <= 1024:
// n.Left does not escape. Use a stack temporary initialized to n.Left.
- value = temp(fromType)
- init.Append(typecheck(ir.NewAssignStmt(base.Pos, value, n.X), ctxStmt))
+ value = typecheck.Temp(fromType)
+ init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, value, n.X)))
}
if value != nil {
// Value is identical to n.Left.
// Construct the interface directly: {type/itab, &value}.
- l := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeword(), typecheck(nodAddr(value), ctxExpr))
+ l := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeword(), typecheck.Expr(typecheck.NodAddr(value)))
l.SetType(toType)
l.SetTypecheck(n.Typecheck())
return l
@@ -1005,15 +1006,15 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// e = iface{tmp, i.data}
if toType.IsEmptyInterface() && fromType.IsInterface() && !fromType.IsEmptyInterface() {
// Evaluate the input interface.
- c := temp(fromType)
+ c := typecheck.Temp(fromType)
init.Append(ir.NewAssignStmt(base.Pos, c, n.X))
// Get the itab out of the interface.
- tmp := temp(types.NewPtr(types.Types[types.TUINT8]))
- init.Append(ir.NewAssignStmt(base.Pos, tmp, typecheck(ir.NewUnaryExpr(base.Pos, ir.OITAB, c), ctxExpr)))
+ tmp := typecheck.Temp(types.NewPtr(types.Types[types.TUINT8]))
+ init.Append(ir.NewAssignStmt(base.Pos, tmp, typecheck.Expr(ir.NewUnaryExpr(base.Pos, ir.OITAB, c))))
// Get the type out of the itab.
- nif := ir.NewIfStmt(base.Pos, typecheck(ir.NewBinaryExpr(base.Pos, ir.ONE, tmp, nodnil()), ctxExpr), nil, nil)
+ nif := ir.NewIfStmt(base.Pos, typecheck.Expr(ir.NewBinaryExpr(base.Pos, ir.ONE, tmp, typecheck.NodNil())), nil, nil)
nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, tmp, itabType(tmp))}
init.Append(nif)
@@ -1030,13 +1031,13 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// Use a specialized conversion routine that only returns a data pointer.
// ptr = convT2X(val)
// e = iface{typ/tab, ptr}
- fn := syslook(fnname)
+ fn := typecheck.LookupRuntime(fnname)
types.CalcSize(fromType)
- fn = substArgTypes(fn, fromType)
+ fn = typecheck.SubstArgTypes(fn, fromType)
types.CalcSize(fn.Type())
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
call.Args = []ir.Node{n.X}
- e := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeword(), safeexpr(walkexpr(typecheck(call, ctxExpr), init), init))
+ e := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeword(), safeexpr(walkexpr(typecheck.Expr(call), init), init))
e.SetType(toType)
e.SetTypecheck(1)
return e
@@ -1062,16 +1063,16 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
if !ir.IsAssignable(v) {
v = copyexpr(v, v.Type(), init)
}
- v = nodAddr(v)
+ v = typecheck.NodAddr(v)
}
types.CalcSize(fromType)
- fn := syslook(fnname)
- fn = substArgTypes(fn, fromType, toType)
+ fn := typecheck.LookupRuntime(fnname)
+ fn = typecheck.SubstArgTypes(fn, fromType, toType)
types.CalcSize(fn.Type())
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
call.Args = []ir.Node{tab, v}
- return walkexpr(typecheck(call, ctxExpr), init)
+ return walkexpr(typecheck.Expr(call), init)
case ir.OCONV, ir.OCONVNOP:
n := n.(*ir.ConvExpr)
@@ -1092,7 +1093,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
return n
}
fn := types.BasicTypeNames[param] + "to" + types.BasicTypeNames[result]
- return conv(mkcall(fn, types.Types[result], init, conv(n.X, types.Types[param])), n.Type())
+ return typecheck.Conv(mkcall(fn, types.Types[result], init, typecheck.Conv(n.X, types.Types[param])), n.Type())
case ir.ODIV, ir.OMOD:
n := n.(*ir.BinaryExpr)
@@ -1104,8 +1105,8 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
if types.IsComplex[et] && n.Op() == ir.ODIV {
t := n.Type()
- call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, conv(n.X, types.Types[types.TCOMPLEX128]), conv(n.Y, types.Types[types.TCOMPLEX128]))
- return conv(call, t)
+ call := mkcall("complex128div", types.Types[types.TCOMPLEX128], init, typecheck.Conv(n.X, types.Types[types.TCOMPLEX128]), typecheck.Conv(n.Y, types.Types[types.TCOMPLEX128]))
+ return typecheck.Conv(call, t)
}
// Nothing to do for float divisions.
@@ -1150,7 +1151,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
} else {
fn += "mod"
}
- return mkcall(fn, n.Type(), init, conv(n.X, types.Types[et]), conv(n.Y, types.Types[et]))
+ return mkcall(fn, n.Type(), init, typecheck.Conv(n.X, types.Types[et]), typecheck.Conv(n.Y, types.Types[et]))
}
return n
@@ -1213,7 +1214,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
if fast == mapslow {
// standard version takes key by reference.
// order.expr made sure key is addressable.
- key = nodAddr(key)
+ key = typecheck.NodAddr(key)
}
call = mkcall1(mapfn(mapassign[fast], t), nil, init, typename(t), map_, key)
} else {
@@ -1222,7 +1223,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
if fast == mapslow {
// standard version takes key by reference.
// order.expr made sure key is addressable.
- key = nodAddr(key)
+ key = typecheck.NodAddr(key)
}
if w := t.Elem().Width; w <= zeroValSize {
@@ -1297,9 +1298,9 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
if n.Type().Elem().Width >= ir.MaxImplicitStackVarSize {
base.Fatalf("large ONEW with EscNone: %v", n)
}
- r := temp(n.Type().Elem())
- init.Append(typecheck(ir.NewAssignStmt(base.Pos, r, nil), ctxStmt)) // zero temp
- return typecheck(nodAddr(r), ctxExpr)
+ r := typecheck.Temp(n.Type().Elem())
+ init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, nil))) // zero temp
+ return typecheck.Expr(typecheck.NodAddr(r))
}
return callnew(n.Type().Elem())
@@ -1317,8 +1318,8 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OCLOSE:
// cannot use chanfn - closechan takes any, not chan any
n := n.(*ir.UnaryExpr)
- fn := syslook("closechan")
- fn = substArgTypes(fn, n.X.Type())
+ fn := typecheck.LookupRuntime("closechan")
+ fn = typecheck.SubstArgTypes(fn, n.X.Type())
return mkcall1(fn, nil, init, n.X)
case ir.OMAKECHAN:
@@ -1337,7 +1338,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
argtype = types.Types[types.TINT]
}
- return mkcall1(chanfn(fnname, 1, n.Type()), n.Type(), init, typename(n.Type()), conv(size, argtype))
+ return mkcall1(chanfn(fnname, 1, n.Type()), n.Type(), init, typename(n.Type()), typecheck.Conv(size, argtype))
case ir.OMAKEMAP:
n := n.(*ir.MakeExpr)
@@ -1351,10 +1352,10 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// Allocate hmap on stack.
// var hv hmap
- hv := temp(hmapType)
- init.Append(typecheck(ir.NewAssignStmt(base.Pos, hv, nil), ctxStmt))
+ hv := typecheck.Temp(hmapType)
+ init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, hv, nil)))
// h = &hv
- h = nodAddr(hv)
+ h = typecheck.NodAddr(hv)
// Allocate one bucket pointed to by hmap.buckets on stack if hint
// is not larger than BUCKETSIZE. In case hint is larger than
@@ -1377,11 +1378,11 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
nif.Likely = true
// var bv bmap
- bv := temp(bmap(t))
+ bv := typecheck.Temp(bmap(t))
nif.Body.Append(ir.NewAssignStmt(base.Pos, bv, nil))
// b = &bv
- b := nodAddr(bv)
+ b := typecheck.NodAddr(bv)
// h.buckets = b
bsym := hmapType.Field(5).Sym // hmap.buckets see reflect.go:hmap
@@ -1406,17 +1407,17 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
rand := mkcall("fastrand", types.Types[types.TUINT32], init)
hashsym := hmapType.Field(4).Sym // hmap.hash0 see reflect.go:hmap
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, h, hashsym), rand))
- return convnop(h, t)
+ return typecheck.ConvNop(h, t)
}
// Call runtime.makehmap to allocate an
// hmap on the heap and initialize hmap's hash0 field.
- fn := syslook("makemap_small")
- fn = substArgTypes(fn, t.Key(), t.Elem())
+ fn := typecheck.LookupRuntime("makemap_small")
+ fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem())
return mkcall1(fn, n.Type(), init)
}
if n.Esc() != ir.EscNone {
- h = nodnil()
+ h = typecheck.NodNil()
}
// Map initialization with a variable or large hint is
// more complicated. We therefore generate a call to
@@ -1437,9 +1438,9 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
argtype = types.Types[types.TINT]
}
- fn := syslook(fnname)
- fn = substArgTypes(fn, hmapType, t.Key(), t.Elem())
- return mkcall1(fn, n.Type(), init, typename(n.Type()), conv(hint, argtype), h)
+ fn := typecheck.LookupRuntime(fnname)
+ fn = typecheck.SubstArgTypes(fn, hmapType, t.Key(), t.Elem())
+ return mkcall1(fn, n.Type(), init, typename(n.Type()), typecheck.Conv(hint, argtype), h)
case ir.OMAKESLICE:
n := n.(*ir.MakeExpr)
@@ -1459,7 +1460,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
}
// var arr [r]T
// n = arr[:l]
- i := indexconst(r)
+ i := typecheck.IndexConst(r)
if i < 0 {
base.Fatalf("walkexpr: invalid index %v", r)
}
@@ -1471,19 +1472,19 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// if len < 0 { panicmakeslicelen() }
// panicmakeslicecap()
// }
- nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, conv(l, types.Types[types.TUINT64]), ir.NewInt(i)), nil, nil)
+ nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, typecheck.Conv(l, types.Types[types.TUINT64]), ir.NewInt(i)), nil, nil)
niflen := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLT, l, ir.NewInt(0)), nil, nil)
niflen.Body = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
nif.Body.Append(niflen, mkcall("panicmakeslicecap", nil, init))
- init.Append(typecheck(nif, ctxStmt))
+ init.Append(typecheck.Stmt(nif))
t = types.NewArray(t.Elem(), i) // [r]T
- var_ := temp(t)
+ var_ := typecheck.Temp(t)
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, nil)) // zero temp
r := ir.NewSliceExpr(base.Pos, ir.OSLICE, var_) // arr[:l]
r.SetSliceBounds(nil, l, nil)
// The conv is necessary in case n.Type is named.
- return walkexpr(typecheck(conv(r, n.Type()), ctxExpr), init)
+ return walkexpr(typecheck.Expr(typecheck.Conv(r, n.Type())), init)
}
// n escapes; set up a call to makeslice.
@@ -1507,11 +1508,11 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
m := ir.NewSliceHeaderExpr(base.Pos, nil, nil, nil, nil)
m.SetType(t)
- fn := syslook(fnname)
- m.Ptr = mkcall1(fn, types.Types[types.TUNSAFEPTR], init, typename(t.Elem()), conv(len, argtype), conv(cap, argtype))
+ fn := typecheck.LookupRuntime(fnname)
+ m.Ptr = mkcall1(fn, types.Types[types.TUNSAFEPTR], init, typename(t.Elem()), typecheck.Conv(len, argtype), typecheck.Conv(cap, argtype))
m.Ptr.MarkNonNil()
- m.LenCap = []ir.Node{conv(len, types.Types[types.TINT]), conv(cap, types.Types[types.TINT])}
- return walkexpr(typecheck(m, ctxExpr), init)
+ m.LenCap = []ir.Node{typecheck.Conv(len, types.Types[types.TINT]), typecheck.Conv(cap, types.Types[types.TINT])}
+ return walkexpr(typecheck.Expr(m), init)
case ir.OMAKESLICECOPY:
n := n.(*ir.MakeExpr)
@@ -1524,7 +1525,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem())
}
- length := conv(n.Len, types.Types[types.TINT])
+ length := typecheck.Conv(n.Len, types.Types[types.TINT])
copylen := ir.NewUnaryExpr(base.Pos, ir.OLEN, n.Cap)
copyptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, n.Cap)
@@ -1535,56 +1536,56 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// We do not check for overflow of len(to)*elem.Width here
// since len(from) is an existing checked slice capacity
// with same elem.Width for the from slice.
- size := ir.NewBinaryExpr(base.Pos, ir.OMUL, conv(length, types.Types[types.TUINTPTR]), conv(ir.NewInt(t.Elem().Width), types.Types[types.TUINTPTR]))
+ size := ir.NewBinaryExpr(base.Pos, ir.OMUL, typecheck.Conv(length, types.Types[types.TUINTPTR]), typecheck.Conv(ir.NewInt(t.Elem().Width), types.Types[types.TUINTPTR]))
// instantiate mallocgc(size uintptr, typ *byte, needszero bool) unsafe.Pointer
- fn := syslook("mallocgc")
+ fn := typecheck.LookupRuntime("mallocgc")
sh := ir.NewSliceHeaderExpr(base.Pos, nil, nil, nil, nil)
- sh.Ptr = mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, nodnil(), ir.NewBool(false))
+ sh.Ptr = mkcall1(fn, types.Types[types.TUNSAFEPTR], init, size, typecheck.NodNil(), ir.NewBool(false))
sh.Ptr.MarkNonNil()
sh.LenCap = []ir.Node{length, length}
sh.SetType(t)
- s := temp(t)
- r := typecheck(ir.NewAssignStmt(base.Pos, s, sh), ctxStmt)
+ s := typecheck.Temp(t)
+ r := typecheck.Stmt(ir.NewAssignStmt(base.Pos, s, sh))
r = walkexpr(r, init)
init.Append(r)
// instantiate memmove(to *any, frm *any, size uintptr)
- fn = syslook("memmove")
- fn = substArgTypes(fn, t.Elem(), t.Elem())
+ fn = typecheck.LookupRuntime("memmove")
+ fn = typecheck.SubstArgTypes(fn, t.Elem(), t.Elem())
ncopy := mkcall1(fn, nil, init, ir.NewUnaryExpr(base.Pos, ir.OSPTR, s), copyptr, size)
- init.Append(walkexpr(typecheck(ncopy, ctxStmt), init))
+ init.Append(walkexpr(typecheck.Stmt(ncopy), init))
return s
}
// Replace make+copy with runtime.makeslicecopy.
// instantiate makeslicecopy(typ *byte, tolen int, fromlen int, from unsafe.Pointer) unsafe.Pointer
- fn := syslook("makeslicecopy")
+ fn := typecheck.LookupRuntime("makeslicecopy")
s := ir.NewSliceHeaderExpr(base.Pos, nil, nil, nil, nil)
- s.Ptr = mkcall1(fn, types.Types[types.TUNSAFEPTR], init, typename(t.Elem()), length, copylen, conv(copyptr, types.Types[types.TUNSAFEPTR]))
+ s.Ptr = mkcall1(fn, types.Types[types.TUNSAFEPTR], init, typename(t.Elem()), length, copylen, typecheck.Conv(copyptr, types.Types[types.TUNSAFEPTR]))
s.Ptr.MarkNonNil()
s.LenCap = []ir.Node{length, length}
s.SetType(t)
- return walkexpr(typecheck(s, ctxExpr), init)
+ return walkexpr(typecheck.Expr(s), init)
case ir.ORUNESTR:
n := n.(*ir.ConvExpr)
- a := nodnil()
+ a := typecheck.NodNil()
if n.Esc() == ir.EscNone {
t := types.NewArray(types.Types[types.TUINT8], 4)
- a = nodAddr(temp(t))
+ a = typecheck.NodAddr(typecheck.Temp(t))
}
// intstring(*[4]byte, rune)
- return mkcall("intstring", n.Type(), init, a, conv(n.X, types.Types[types.TINT64]))
+ return mkcall("intstring", n.Type(), init, a, typecheck.Conv(n.X, types.Types[types.TINT64]))
case ir.OBYTES2STR, ir.ORUNES2STR:
n := n.(*ir.ConvExpr)
- a := nodnil()
+ a := typecheck.NodNil()
if n.Esc() == ir.EscNone {
// Create temporary buffer for string on stack.
t := types.NewArray(types.Types[types.TUINT8], tmpstringbufsize)
- a = nodAddr(temp(t))
+ a = typecheck.NodAddr(typecheck.Temp(t))
}
if n.Op() == ir.ORUNES2STR {
// slicerunetostring(*[32]byte, []rune) string
@@ -1618,16 +1619,16 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
t := types.NewArray(types.Types[types.TUINT8], int64(len(sc)))
var a ir.Node
if n.Esc() == ir.EscNone && len(sc) <= int(ir.MaxImplicitStackVarSize) {
- a = nodAddr(temp(t))
+ a = typecheck.NodAddr(typecheck.Temp(t))
} else {
a = callnew(t)
}
- p := temp(t.PtrTo()) // *[n]byte
- init.Append(typecheck(ir.NewAssignStmt(base.Pos, p, a), ctxStmt))
+ p := typecheck.Temp(t.PtrTo()) // *[n]byte
+ init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, p, a)))
// Copy from the static string data to the [n]byte.
if len(sc) > 0 {
- as := ir.NewAssignStmt(base.Pos, ir.NewStarExpr(base.Pos, p), ir.NewStarExpr(base.Pos, convnop(ir.NewUnaryExpr(base.Pos, ir.OSPTR, s), t.PtrTo())))
+ as := ir.NewAssignStmt(base.Pos, ir.NewStarExpr(base.Pos, p), ir.NewStarExpr(base.Pos, typecheck.ConvNop(ir.NewUnaryExpr(base.Pos, ir.OSPTR, s), t.PtrTo())))
appendWalkStmt(init, as)
}
@@ -1638,14 +1639,14 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
return walkexpr(slice, init)
}
- a := nodnil()
+ a := typecheck.NodNil()
if n.Esc() == ir.EscNone {
// Create temporary buffer for slice on stack.
t := types.NewArray(types.Types[types.TUINT8], tmpstringbufsize)
- a = nodAddr(temp(t))
+ a = typecheck.NodAddr(typecheck.Temp(t))
}
// stringtoslicebyte(*[32]byte, string) []byte
- return mkcall("stringtoslicebyte", n.Type(), init, a, conv(s, types.Types[types.TSTRING]))
+ return mkcall("stringtoslicebyte", n.Type(), init, a, typecheck.Conv(s, types.Types[types.TSTRING]))
case ir.OSTR2BYTESTMP:
// []byte(string) conversion that creates a slice
@@ -1661,14 +1662,14 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OSTR2RUNES:
n := n.(*ir.ConvExpr)
- a := nodnil()
+ a := typecheck.NodNil()
if n.Esc() == ir.EscNone {
// Create temporary buffer for slice on stack.
t := types.NewArray(types.Types[types.TINT32], tmpstringbufsize)
- a = nodAddr(temp(t))
+ a = typecheck.NodAddr(typecheck.Temp(t))
}
// stringtoslicerune(*[32]rune, string) []rune
- return mkcall("stringtoslicerune", n.Type(), init, a, conv(n.X, types.Types[types.TSTRING]))
+ return mkcall("stringtoslicerune", n.Type(), init, a, typecheck.Conv(n.X, types.Types[types.TSTRING]))
case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT, ir.OPTRLIT:
if isStaticCompositeLiteral(n) && !canSSAType(n.Type()) {
@@ -1677,18 +1678,18 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
// Make direct reference to the static data. See issue 12841.
vstat := readonlystaticname(n.Type())
fixedlit(inInitFunction, initKindStatic, n, vstat, init)
- return typecheck(vstat, ctxExpr)
+ return typecheck.Expr(vstat)
}
- var_ := temp(n.Type())
+ var_ := typecheck.Temp(n.Type())
anylit(n, var_, init)
return var_
case ir.OSEND:
n := n.(*ir.SendStmt)
n1 := n.Value
- n1 = assignconv(n1, n.Chan.Type().Elem(), "chan send")
+ n1 = typecheck.AssignConv(n1, n.Chan.Type().Elem(), "chan send")
n1 = walkexpr(n1, init)
- n1 = nodAddr(n1)
+ n1 = typecheck.NodAddr(n1)
return mkcall1(chanfn("chansend1", 2, n.Chan.Type()), nil, init, n.Chan, n1)
case ir.OCLOSURE:
@@ -1871,8 +1872,8 @@ func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
// Any assignment to an lvalue that might cause a function call must be
// deferred until all the returned values have been read.
if fncall(l, r.Type) {
- tmp := ir.Node(temp(r.Type))
- tmp = typecheck(tmp, ctxExpr)
+ tmp := ir.Node(typecheck.Temp(r.Type))
+ tmp = typecheck.Expr(tmp)
a := convas(ir.NewAssignStmt(base.Pos, l, tmp), &mm)
mm.Append(a)
l = tmp
@@ -1895,48 +1896,6 @@ func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
return append(nn, mm...)
}
-// package all the arguments that match a ... T parameter into a []T.
-func mkdotargslice(typ *types.Type, args []ir.Node) ir.Node {
- var n ir.Node
- if len(args) == 0 {
- n = nodnil()
- n.SetType(typ)
- } else {
- lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
- lit.List.Append(args...)
- lit.SetImplicit(true)
- n = lit
- }
-
- n = typecheck(n, ctxExpr)
- if n.Type() == nil {
- base.Fatalf("mkdotargslice: typecheck failed")
- }
- return n
-}
-
-// fixVariadicCall rewrites calls to variadic functions to use an
-// explicit ... argument if one is not already present.
-func fixVariadicCall(call *ir.CallExpr) {
- fntype := call.X.Type()
- if !fntype.IsVariadic() || call.IsDDD {
- return
- }
-
- vi := fntype.NumParams() - 1
- vt := fntype.Params().Field(vi).Type
-
- args := call.Args
- extra := args[vi:]
- slice := mkdotargslice(vt, extra)
- for i := range extra {
- extra[i] = nil // allow GC
- }
-
- call.Args.Set(append(args[:vi], slice))
- call.IsDDD = true
-}
-
func walkCall(n *ir.CallExpr, init *ir.Nodes) {
if len(n.Rargs) != 0 {
return // already walked
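The two helpers removed above now live in package typecheck as MakeDotArgs and FixVariadicCall. As a minimal source-level sketch of the rewrite they perform (the sum function and names here are only for illustration, not compiler code), the trailing arguments that match a ...T parameter are packed into a []T and the call is marked as already using an explicit ...:

    package main

    import "fmt"

    func sum(prefix string, xs ...int) int {
        total := 0
        for _, x := range xs {
            total += x
        }
        fmt.Println(prefix, total)
        return total
    }

    func main() {
        sum("loose:", 1, 2, 3) // as written by the user

        packed := []int{1, 2, 3}  // MakeDotArgs builds this composite literal
        sum("packed:", packed...) // FixVariadicCall marks the call as IsDDD
    }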
@@ -1978,7 +1937,7 @@ func walkCall(n *ir.CallExpr, init *ir.Nodes) {
}
if base.Flag.Cfg.Instrumenting || fncall(arg, t) {
// make assignment of fncall to tempAt
- tmp := temp(t)
+ tmp := typecheck.Temp(t)
a := convas(ir.NewAssignStmt(base.Pos, tmp, arg), init)
tempAssigns = append(tempAssigns, a)
// replace arg with temp
@@ -2032,22 +1991,22 @@ func walkprint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
for i, n := range nn.Args {
if n.Op() == ir.OLITERAL {
if n.Type() == types.UntypedRune {
- n = defaultlit(n, types.RuneType)
+ n = typecheck.DefaultLit(n, types.RuneType)
}
switch n.Val().Kind() {
case constant.Int:
- n = defaultlit(n, types.Types[types.TINT64])
+ n = typecheck.DefaultLit(n, types.Types[types.TINT64])
case constant.Float:
- n = defaultlit(n, types.Types[types.TFLOAT64])
+ n = typecheck.DefaultLit(n, types.Types[types.TFLOAT64])
}
}
if n.Op() != ir.OLITERAL && n.Type() != nil && n.Type().Kind() == types.TIDEAL {
- n = defaultlit(n, types.Types[types.TINT64])
+ n = typecheck.DefaultLit(n, types.Types[types.TINT64])
}
- n = defaultlit(n, nil)
+ n = typecheck.DefaultLit(n, nil)
nn.Args[i] = n
if n.Type() == nil || n.Type().Kind() == types.TFORW {
continue
@@ -2057,14 +2016,14 @@ func walkprint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
switch n.Type().Kind() {
case types.TINTER:
if n.Type().IsEmptyInterface() {
- on = syslook("printeface")
+ on = typecheck.LookupRuntime("printeface")
} else {
- on = syslook("printiface")
+ on = typecheck.LookupRuntime("printiface")
}
- on = substArgTypes(on, n.Type()) // any-1
+ on = typecheck.SubstArgTypes(on, n.Type()) // any-1
case types.TPTR:
if n.Type().Elem().NotInHeap() {
- on = syslook("printuintptr")
+ on = typecheck.LookupRuntime("printuintptr")
n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
n.SetType(types.Types[types.TUNSAFEPTR])
n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
@@ -2073,25 +2032,25 @@ func walkprint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
}
fallthrough
case types.TCHAN, types.TMAP, types.TFUNC, types.TUNSAFEPTR:
- on = syslook("printpointer")
- on = substArgTypes(on, n.Type()) // any-1
+ on = typecheck.LookupRuntime("printpointer")
+ on = typecheck.SubstArgTypes(on, n.Type()) // any-1
case types.TSLICE:
- on = syslook("printslice")
- on = substArgTypes(on, n.Type()) // any-1
+ on = typecheck.LookupRuntime("printslice")
+ on = typecheck.SubstArgTypes(on, n.Type()) // any-1
case types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR:
if types.IsRuntimePkg(n.Type().Sym().Pkg) && n.Type().Sym().Name == "hex" {
- on = syslook("printhex")
+ on = typecheck.LookupRuntime("printhex")
} else {
- on = syslook("printuint")
+ on = typecheck.LookupRuntime("printuint")
}
case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64:
- on = syslook("printint")
+ on = typecheck.LookupRuntime("printint")
case types.TFLOAT32, types.TFLOAT64:
- on = syslook("printfloat")
+ on = typecheck.LookupRuntime("printfloat")
case types.TCOMPLEX64, types.TCOMPLEX128:
- on = syslook("printcomplex")
+ on = typecheck.LookupRuntime("printcomplex")
case types.TBOOL:
- on = syslook("printbool")
+ on = typecheck.LookupRuntime("printbool")
case types.TSTRING:
cs := ""
if ir.IsConst(n, constant.String) {
@@ -2099,11 +2058,11 @@ func walkprint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
}
switch cs {
case " ":
- on = syslook("printsp")
+ on = typecheck.LookupRuntime("printsp")
case "\n":
- on = syslook("printnl")
+ on = typecheck.LookupRuntime("printnl")
default:
- on = syslook("printstring")
+ on = typecheck.LookupRuntime("printstring")
}
default:
badtype(ir.OPRINT, n.Type(), nil)
@@ -2124,12 +2083,12 @@ func walkprint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
calls = append(calls, mkcall("printunlock", nil, init))
- typecheckslice(calls, ctxStmt)
+ typecheck.Stmts(calls)
walkexprlist(calls, init)
r := ir.NewBlockStmt(base.Pos, nil)
r.List.Set(calls)
- return walkstmt(typecheck(r, ctxStmt))
+ return walkstmt(typecheck.Stmt(r))
}
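The runtime helpers looked up in this function (printint, printfloat, printstring, printsp, printnl, printlock/printunlock) correspond to the pieces of a lowered println. A rough sketch of that expansion, shown only as comments on ordinary Go:

    package main

    func main() {
        x, y := 42, 1.5
        println(x, y, "done")
        // Roughly lowered by walkprint to:
        //   printlock()
        //   printint(42); printsp()
        //   printfloat(1.5); printsp()
        //   printstring("done"); printnl()
        //   printunlock()
    }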
func callnew(t *types.Type) ir.Node {
@@ -2160,12 +2119,12 @@ func convas(n *ir.AssignStmt, init *ir.Nodes) *ir.AssignStmt {
}
if ir.IsBlank(n.X) {
- n.Y = defaultlit(n.Y, nil)
+ n.Y = typecheck.DefaultLit(n.Y, nil)
return n
}
if !types.Identical(lt, rt) {
- n.Y = assignconv(n.Y, lt, "assignment")
+ n.Y = typecheck.AssignConv(n.Y, lt, "assignment")
n.Y = walkexpr(n.Y, init)
}
types.CalcSize(n.Y.Type())
@@ -2258,8 +2217,8 @@ func reorder3save(n ir.Node, all []*ir.AssignStmt, i int, early *[]ir.Node) ir.N
return n
}
- q := ir.Node(temp(n.Type()))
- as := typecheck(ir.NewAssignStmt(base.Pos, q, n), ctxStmt)
+ q := ir.Node(typecheck.Temp(n.Type()))
+ as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, q, n))
*early = append(*early, as)
return q
}
@@ -2455,7 +2414,7 @@ func paramstoheap(params *types.Type) []ir.Node {
if stackcopy := v.Name().Stackcopy; stackcopy != nil {
nn = append(nn, walkstmt(ir.NewDecl(base.Pos, ir.ODCL, v)))
if stackcopy.Class_ == ir.PPARAM {
- nn = append(nn, walkstmt(typecheck(ir.NewAssignStmt(base.Pos, v, stackcopy), ctxStmt)))
+ nn = append(nn, walkstmt(typecheck.Stmt(ir.NewAssignStmt(base.Pos, v, stackcopy))))
}
}
}
@@ -2503,7 +2462,7 @@ func returnsfromheap(params *types.Type) []ir.Node {
continue
}
if stackcopy := v.Name().Stackcopy; stackcopy != nil && stackcopy.Class_ == ir.PPARAMOUT {
- nn = append(nn, walkstmt(typecheck(ir.NewAssignStmt(base.Pos, stackcopy, v), ctxStmt)))
+ nn = append(nn, walkstmt(typecheck.Stmt(ir.NewAssignStmt(base.Pos, stackcopy, v))))
}
}
@@ -2536,41 +2495,19 @@ func vmkcall(fn ir.Node, t *types.Type, init *ir.Nodes, va []ir.Node) *ir.CallEx
}
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, va)
- TypecheckCall(call)
+ typecheck.Call(call)
call.SetType(t)
return walkexpr(call, init).(*ir.CallExpr)
}
func mkcall(name string, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
- return vmkcall(syslook(name), t, init, args)
+ return vmkcall(typecheck.LookupRuntime(name), t, init, args)
}
func mkcall1(fn ir.Node, t *types.Type, init *ir.Nodes, args ...ir.Node) *ir.CallExpr {
return vmkcall(fn, t, init, args)
}
-func conv(n ir.Node, t *types.Type) ir.Node {
- if types.Identical(n.Type(), t) {
- return n
- }
- n = ir.NewConvExpr(base.Pos, ir.OCONV, nil, n)
- n.SetType(t)
- n = typecheck(n, ctxExpr)
- return n
-}
-
-// convnop converts node n to type t using the OCONVNOP op
-// and typechecks the result with ctxExpr.
-func convnop(n ir.Node, t *types.Type) ir.Node {
- if types.Identical(n.Type(), t) {
- return n
- }
- n = ir.NewConvExpr(base.Pos, ir.OCONVNOP, nil, n)
- n.SetType(t)
- n = typecheck(n, ctxExpr)
- return n
-}
-
// byteindex converts n, which is byte-sized, to an int used to index into an array.
// We cannot use conv, because we allow converting bool to int here,
// which is forbidden in user code.
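The conv and convnop helpers removed above, now typecheck.Conv and typecheck.ConvNop, differ only in the op they build: OCONV is an ordinary conversion that may change representation, while OCONVNOP only changes the static type. A small sketch of the two situations at the source level (the type and variable names are illustrative):

    package main

    import "fmt"

    type myBytes []byte

    func main() {
        // Conv / OCONV: the representation changes (integer bits become
        // floating-point bits).
        i := int64(3)
        f := float64(i)

        // ConvNop / OCONVNOP: []byte and myBytes share a representation,
        // so the conversion is a no-op at run time.
        b := []byte("hi")
        m := myBytes(b)

        fmt.Println(f, m)
    }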
@@ -2594,14 +2531,14 @@ func chanfn(name string, n int, t *types.Type) ir.Node {
if !t.IsChan() {
base.Fatalf("chanfn %v", t)
}
- fn := syslook(name)
+ fn := typecheck.LookupRuntime(name)
switch n {
default:
base.Fatalf("chanfn %d", n)
case 1:
- fn = substArgTypes(fn, t.Elem())
+ fn = typecheck.SubstArgTypes(fn, t.Elem())
case 2:
- fn = substArgTypes(fn, t.Elem(), t.Elem())
+ fn = typecheck.SubstArgTypes(fn, t.Elem(), t.Elem())
}
return fn
}
@@ -2610,8 +2547,8 @@ func mapfn(name string, t *types.Type) ir.Node {
if !t.IsMap() {
base.Fatalf("mapfn %v", t)
}
- fn := syslook(name)
- fn = substArgTypes(fn, t.Key(), t.Elem(), t.Key(), t.Elem())
+ fn := typecheck.LookupRuntime(name)
+ fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem(), t.Key(), t.Elem())
return fn
}
@@ -2619,8 +2556,8 @@ func mapfndel(name string, t *types.Type) ir.Node {
if !t.IsMap() {
base.Fatalf("mapfn %v", t)
}
- fn := syslook(name)
- fn = substArgTypes(fn, t.Key(), t.Elem(), t.Key())
+ fn := typecheck.LookupRuntime(name)
+ fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem(), t.Key())
return fn
}
@@ -2675,8 +2612,8 @@ func mapfast(t *types.Type) int {
}
func writebarrierfn(name string, l *types.Type, r *types.Type) ir.Node {
- fn := syslook(name)
- fn = substArgTypes(fn, l, r)
+ fn := typecheck.LookupRuntime(name)
+ fn = typecheck.SubstArgTypes(fn, l, r)
return fn
}
@@ -2687,7 +2624,7 @@ func addstr(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
base.Fatalf("addstr count %d too small", c)
}
- buf := nodnil()
+ buf := typecheck.NodNil()
if n.Esc() == ir.EscNone {
sz := int64(0)
for _, n1 := range n.List {
@@ -2700,14 +2637,14 @@ func addstr(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
if sz < tmpstringbufsize {
// Create temporary buffer for result string on stack.
t := types.NewArray(types.Types[types.TUINT8], tmpstringbufsize)
- buf = nodAddr(temp(t))
+ buf = typecheck.NodAddr(typecheck.Temp(t))
}
}
// build list of string arguments
args := []ir.Node{buf}
for _, n2 := range n.List {
- args = append(args, conv(n2, types.Types[types.TSTRING]))
+ args = append(args, typecheck.Conv(n2, types.Types[types.TSTRING]))
}
var fn string
@@ -2727,10 +2664,10 @@ func addstr(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
slice.SetEsc(ir.EscNone)
}
- cat := syslook(fn)
+ cat := typecheck.LookupRuntime(fn)
r := ir.NewCallExpr(base.Pos, ir.OCALL, cat, nil)
r.Args.Set(args)
- r1 := typecheck(r, ctxExpr)
+ r1 := typecheck.Expr(r)
r1 = walkexpr(r1, init)
r1.SetType(n.Type())
@@ -2774,24 +2711,24 @@ func appendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
var nodes ir.Nodes
// var s []T
- s := temp(l1.Type())
+ s := typecheck.Temp(l1.Type())
nodes.Append(ir.NewAssignStmt(base.Pos, s, l1)) // s = l1
elemtype := s.Type().Elem()
// n := len(s) + len(l2)
- nn := temp(types.Types[types.TINT])
+ nn := typecheck.Temp(types.Types[types.TINT])
nodes.Append(ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), ir.NewUnaryExpr(base.Pos, ir.OLEN, l2))))
// if uint(n) > uint(cap(s))
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
- nuint := conv(nn, types.Types[types.TUINT])
- scapuint := conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
+ nuint := typecheck.Conv(nn, types.Types[types.TUINT])
+ scapuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OGT, nuint, scapuint)
// instantiate growslice(typ *type, []any, int) []any
- fn := syslook("growslice")
- fn = substArgTypes(fn, elemtype, elemtype)
+ fn := typecheck.LookupRuntime("growslice")
+ fn = typecheck.SubstArgTypes(fn, elemtype, elemtype)
// s = growslice(T, s, n)
nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(), typename(elemtype), s, nn))}
@@ -2813,8 +2750,8 @@ func appendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
ir.CurFunc.SetWBPos(n.Pos())
// instantiate typedslicecopy(typ *type, dstPtr *any, dstLen int, srcPtr *any, srcLen int) int
- fn := syslook("typedslicecopy")
- fn = substArgTypes(fn, l1.Type().Elem(), l2.Type().Elem())
+ fn := typecheck.LookupRuntime("typedslicecopy")
+ fn = typecheck.SubstArgTypes(fn, l1.Type().Elem(), l2.Type().Elem())
ptr1, len1 := backingArrayPtrLen(cheapexpr(slice, &nodes))
ptr2, len2 := backingArrayPtrLen(l2)
ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, typename(elemtype), ptr1, len1, ptr2, len2)
@@ -2829,28 +2766,28 @@ func appendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
ptr1, len1 := backingArrayPtrLen(cheapexpr(slice, &nodes))
ptr2, len2 := backingArrayPtrLen(l2)
- fn := syslook("slicecopy")
- fn = substArgTypes(fn, ptr1.Type().Elem(), ptr2.Type().Elem())
+ fn := typecheck.LookupRuntime("slicecopy")
+ fn = typecheck.SubstArgTypes(fn, ptr1.Type().Elem(), ptr2.Type().Elem())
ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, ptr1, len1, ptr2, len2, ir.NewInt(elemtype.Width))
} else {
// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
ix := ir.NewIndexExpr(base.Pos, s, ir.NewUnaryExpr(base.Pos, ir.OLEN, l1))
ix.SetBounded(true)
- addr := nodAddr(ix)
+ addr := typecheck.NodAddr(ix)
sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l2)
- nwid := cheapexpr(conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, l2), types.Types[types.TUINTPTR]), &nodes)
+ nwid := cheapexpr(typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, l2), types.Types[types.TUINTPTR]), &nodes)
nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(elemtype.Width))
// instantiate func memmove(to *any, frm *any, length uintptr)
- fn := syslook("memmove")
- fn = substArgTypes(fn, elemtype, elemtype)
+ fn := typecheck.LookupRuntime("memmove")
+ fn = typecheck.SubstArgTypes(fn, elemtype, elemtype)
ncopy = mkcall1(fn, nil, &nodes, addr, sptr, nwid)
}
ln := append(nodes, ncopy)
- typecheckslice(ln, ctxStmt)
+ typecheck.Stmts(ln)
walkstmtlist(ln)
init.Append(ln...)
return s
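The comments in this hunk spell out the expansion of append(l1, l2...). The same shape written as ordinary Go for a concrete element type, with make+copy standing in for growslice and memmove (appendSlice is an illustrative name, not compiler code):

    package main

    import "fmt"

    func appendSlice(l1, l2 []int) []int {
        s := l1               // var s []T; s = l1
        n := len(s) + len(l2) // n := len(s) + len(l2)
        if uint(n) > uint(cap(s)) {
            grown := make([]int, len(s), n) // s = growslice(T, s, n)
            copy(grown, s)
            s = grown
        }
        s = s[:n]
        copy(s[len(l1):], l2) // memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
        return s
    }

    func main() {
        fmt.Println(appendSlice([]int{1, 2}, []int{3, 4, 5})) // [1 2 3 4 5]
    }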
@@ -2925,8 +2862,8 @@ func extendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
// isAppendOfMake made sure all possible positive values of l2 fit into an uint.
// The case of l2 overflow when converting from e.g. uint to int is handled by an explicit
// check of l2 < 0 at runtime which is generated below.
- l2 := conv(n.Args[1].(*ir.MakeExpr).Len, types.Types[types.TINT])
- l2 = typecheck(l2, ctxExpr)
+ l2 := typecheck.Conv(n.Args[1].(*ir.MakeExpr).Len, types.Types[types.TINT])
+ l2 = typecheck.Expr(l2)
n.Args[1] = l2 // walkAppendArgs expects l2 in n.List.Second().
walkAppendArgs(n, init)
@@ -2945,23 +2882,23 @@ func extendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
nodes = append(nodes, nifneg)
// s := l1
- s := temp(l1.Type())
+ s := typecheck.Temp(l1.Type())
nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, l1))
elemtype := s.Type().Elem()
// n := len(s) + l2
- nn := temp(types.Types[types.TINT])
+ nn := typecheck.Temp(types.Types[types.TINT])
nodes = append(nodes, ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2)))
// if uint(n) > uint(cap(s))
- nuint := conv(nn, types.Types[types.TUINT])
- capuint := conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
+ nuint := typecheck.Conv(nn, types.Types[types.TUINT])
+ capuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGT, nuint, capuint), nil, nil)
// instantiate growslice(typ *type, old []any, newcap int) []any
- fn := syslook("growslice")
- fn = substArgTypes(fn, elemtype, elemtype)
+ fn := typecheck.LookupRuntime("growslice")
+ fn = typecheck.SubstArgTypes(fn, elemtype, elemtype)
// s = growslice(T, s, n)
nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, mkcall1(fn, s.Type(), nif.PtrInit(), typename(elemtype), s, nn))}
@@ -2974,22 +2911,22 @@ func extendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, nt))
// lptr := &l1[0]
- l1ptr := temp(l1.Type().Elem().PtrTo())
+ l1ptr := typecheck.Temp(l1.Type().Elem().PtrTo())
tmp := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l1)
nodes = append(nodes, ir.NewAssignStmt(base.Pos, l1ptr, tmp))
// sptr := &s[0]
- sptr := temp(elemtype.PtrTo())
+ sptr := typecheck.Temp(elemtype.PtrTo())
tmp = ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
nodes = append(nodes, ir.NewAssignStmt(base.Pos, sptr, tmp))
// hp := &s[len(l1)]
ix := ir.NewIndexExpr(base.Pos, s, ir.NewUnaryExpr(base.Pos, ir.OLEN, l1))
ix.SetBounded(true)
- hp := convnop(nodAddr(ix), types.Types[types.TUNSAFEPTR])
+ hp := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])
// hn := l2 * sizeof(elem(s))
- hn := conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(elemtype.Width)), types.Types[types.TUINTPTR])
+ hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(elemtype.Width)), types.Types[types.TUINTPTR])
clrname := "memclrNoHeapPointers"
hasPointers := elemtype.HasPointers()
@@ -3011,7 +2948,7 @@ func extendslice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
nodes = append(nodes, clr...)
}
- typecheckslice(nodes, ctxStmt)
+ typecheck.Stmts(nodes)
walkstmtlist(nodes)
init.Append(nodes...)
return s
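extendslice handles append(s, make([]T, l2)...): check l2 < 0, grow if needed, then zero the new tail via memclrNoHeapPointers or memclrHasPointers. A sketch of the same steps in plain Go (extend is an illustrative name; the explicit zeroing loop stands in for the memclr call):

    package main

    import "fmt"

    func extend(s []int, l2 int) []int {
        if l2 < 0 {
            panic("len out of range") // the generated l2 < 0 check
        }
        n := len(s) + l2 // n := len(s) + l2
        if uint(n) > uint(cap(s)) {
            grown := make([]int, len(s), n) // s = growslice(T, s, n)
            copy(grown, s)
            s = grown
        }
        s = s[:n]
        for i := n - l2; i < n; i++ { // memclr(&s[len(l1)], l2*sizeof(elem))
            s[i] = 0
        }
        return s
    }

    func main() {
        s := make([]int, 2, 8)
        s[0], s[1] = 7, 8
        fmt.Println(extend(s, 3)) // [7 8 0 0 0]
    }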
@@ -3057,7 +2994,7 @@ func walkappend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
for i, n := range ls {
n = cheapexpr(n, init)
if !types.Identical(n.Type(), nsrc.Type().Elem()) {
- n = assignconv(n, nsrc.Type().Elem(), "append")
+ n = typecheck.AssignConv(n, nsrc.Type().Elem(), "append")
n = walkexpr(n, init)
}
ls[i] = n
@@ -3076,22 +3013,22 @@ func walkappend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
var l []ir.Node
- ns := temp(nsrc.Type())
+ ns := typecheck.Temp(nsrc.Type())
l = append(l, ir.NewAssignStmt(base.Pos, ns, nsrc)) // s = src
na := ir.NewInt(int64(argc)) // const argc
nif := ir.NewIfStmt(base.Pos, nil, nil, nil) // if cap(s) - len(s) < argc
nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OCAP, ns), ir.NewUnaryExpr(base.Pos, ir.OLEN, ns)), na)
- fn := syslook("growslice") // growslice(<type>, old []T, mincap int) (ret []T)
- fn = substArgTypes(fn, ns.Type().Elem(), ns.Type().Elem())
+ fn := typecheck.LookupRuntime("growslice") // growslice(<type>, old []T, mincap int) (ret []T)
+ fn = typecheck.SubstArgTypes(fn, ns.Type().Elem(), ns.Type().Elem())
nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, ns, mkcall1(fn, ns.Type(), nif.PtrInit(), typename(ns.Type().Elem()), ns,
ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, ns), na)))}
l = append(l, nif)
- nn := temp(types.Types[types.TINT])
+ nn := typecheck.Temp(types.Types[types.TINT])
l = append(l, ir.NewAssignStmt(base.Pos, nn, ir.NewUnaryExpr(base.Pos, ir.OLEN, ns))) // n = len(s)
slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, ns) // ...s[:n+argc]
@@ -3109,7 +3046,7 @@ func walkappend(n *ir.CallExpr, init *ir.Nodes, dst ir.Node) ir.Node {
}
}
- typecheckslice(l, ctxStmt)
+ typecheck.Stmts(l)
walkstmtlist(l)
init.Append(l...)
return ns
@@ -3147,16 +3084,16 @@ func copyany(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
n.Y = cheapexpr(n.Y, init)
ptrR, lenR := backingArrayPtrLen(n.Y)
- fn := syslook("slicecopy")
- fn = substArgTypes(fn, ptrL.Type().Elem(), ptrR.Type().Elem())
+ fn := typecheck.LookupRuntime("slicecopy")
+ fn = typecheck.SubstArgTypes(fn, ptrL.Type().Elem(), ptrR.Type().Elem())
return mkcall1(fn, n.Type(), init, ptrL, lenL, ptrR, lenR, ir.NewInt(n.X.Type().Elem().Width))
}
n.X = walkexpr(n.X, init)
n.Y = walkexpr(n.Y, init)
- nl := temp(n.X.Type())
- nr := temp(n.Y.Type())
+ nl := typecheck.Temp(n.X.Type())
+ nr := typecheck.Temp(n.Y.Type())
var l []ir.Node
l = append(l, ir.NewAssignStmt(base.Pos, nl, n.X))
l = append(l, ir.NewAssignStmt(base.Pos, nr, n.Y))
@@ -3164,7 +3101,7 @@ func copyany(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
nfrm := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nr)
nto := ir.NewUnaryExpr(base.Pos, ir.OSPTR, nl)
- nlen := temp(types.Types[types.TINT])
+ nlen := typecheck.Temp(types.Types[types.TINT])
// n = len(to)
l = append(l, ir.NewAssignStmt(base.Pos, nlen, ir.NewUnaryExpr(base.Pos, ir.OLEN, nl)))
@@ -3181,16 +3118,16 @@ func copyany(n *ir.BinaryExpr, init *ir.Nodes, runtimecall bool) ir.Node {
ne.Likely = true
l = append(l, ne)
- fn := syslook("memmove")
- fn = substArgTypes(fn, nl.Type().Elem(), nl.Type().Elem())
- nwid := ir.Node(temp(types.Types[types.TUINTPTR]))
- setwid := ir.NewAssignStmt(base.Pos, nwid, conv(nlen, types.Types[types.TUINTPTR]))
+ fn := typecheck.LookupRuntime("memmove")
+ fn = typecheck.SubstArgTypes(fn, nl.Type().Elem(), nl.Type().Elem())
+ nwid := ir.Node(typecheck.Temp(types.Types[types.TUINTPTR]))
+ setwid := ir.NewAssignStmt(base.Pos, nwid, typecheck.Conv(nlen, types.Types[types.TUINTPTR]))
ne.Body.Append(setwid)
nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(nl.Type().Elem().Width))
call := mkcall1(fn, nil, init, nto, nfrm, nwid)
ne.Body.Append(call)
- typecheckslice(l, ctxStmt)
+ typecheck.Stmts(l)
walkstmtlist(l)
init.Append(l...)
return nlen
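When copyany does not go through slicecopy or typedslicecopy, it open-codes the copy: take the smaller of the two lengths, then memmove that many elements. The equivalent in ordinary Go (copyInts is an illustrative name):

    package main

    import "fmt"

    func copyInts(to, frm []int) int {
        n := len(to)      // n = len(to)
        if len(frm) < n { // if len(frm) < n { n = len(frm) }
            n = len(frm)
        }
        if n > 0 {
            copy(to[:n], frm[:n]) // memmove(&to[0], &frm[0], n*sizeof(int))
        }
        return n
    }

    func main() {
        dst := make([]int, 3)
        n := copyInts(dst, []int{1, 2, 3, 4})
        fmt.Println(n, dst) // 3 [1 2 3]
    }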
@@ -3203,14 +3140,14 @@ func eqfor(t *types.Type) (n ir.Node, needsize bool) {
// is handled by walkcompare.
switch a, _ := types.AlgType(t); a {
case types.AMEM:
- n := syslook("memequal")
- n = substArgTypes(n, t, t)
+ n := typecheck.LookupRuntime("memequal")
+ n = typecheck.SubstArgTypes(n, t, t)
return n, true
case types.ASPECIAL:
sym := typesymprefix(".eq", t)
- n := NewName(sym)
+ n := typecheck.NewName(sym)
ir.MarkFunc(n)
- n.SetType(functype(nil, []*ir.Field{
+ n.SetType(typecheck.NewFuncType(nil, []*ir.Field{
ir.NewField(base.Pos, nil, nil, types.NewPtr(t)),
ir.NewField(base.Pos, nil, nil, types.NewPtr(t)),
}, []*ir.Field{
@@ -3267,7 +3204,7 @@ func walkcompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
tab.SetTypecheck(1)
eqtype = ir.NewBinaryExpr(base.Pos, eq, tab, rtyp)
} else {
- nonnil := ir.NewBinaryExpr(base.Pos, brcom(eq), nodnil(), tab)
+ nonnil := ir.NewBinaryExpr(base.Pos, brcom(eq), typecheck.NodNil(), tab)
match := ir.NewBinaryExpr(base.Pos, eq, itabType(tab), rtyp)
eqtype = ir.NewLogicalExpr(base.Pos, andor, nonnil, match)
}
@@ -3366,8 +3303,8 @@ func walkcompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
fn, needsize := eqfor(t)
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
- call.Args.Append(nodAddr(cmpl))
- call.Args.Append(nodAddr(cmpr))
+ call.Args.Append(typecheck.NodAddr(cmpl))
+ call.Args.Append(typecheck.NodAddr(cmpr))
if needsize {
call.Args.Append(ir.NewInt(t.Width))
}
@@ -3436,22 +3373,22 @@ func walkcompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
} else {
elemType := t.Elem().ToUnsigned()
cmplw := ir.Node(ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(i)))
- cmplw = conv(cmplw, elemType) // convert to unsigned
- cmplw = conv(cmplw, convType) // widen
+ cmplw = typecheck.Conv(cmplw, elemType) // convert to unsigned
+ cmplw = typecheck.Conv(cmplw, convType) // widen
cmprw := ir.Node(ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(i)))
- cmprw = conv(cmprw, elemType)
- cmprw = conv(cmprw, convType)
+ cmprw = typecheck.Conv(cmprw, elemType)
+ cmprw = typecheck.Conv(cmprw, convType)
// For code like this: uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
// ssa will generate a single large load.
for offset := int64(1); offset < step; offset++ {
lb := ir.Node(ir.NewIndexExpr(base.Pos, cmpl, ir.NewInt(i+offset)))
- lb = conv(lb, elemType)
- lb = conv(lb, convType)
+ lb = typecheck.Conv(lb, elemType)
+ lb = typecheck.Conv(lb, convType)
lb = ir.NewBinaryExpr(base.Pos, ir.OLSH, lb, ir.NewInt(8*t.Elem().Width*offset))
cmplw = ir.NewBinaryExpr(base.Pos, ir.OOR, cmplw, lb)
rb := ir.Node(ir.NewIndexExpr(base.Pos, cmpr, ir.NewInt(i+offset)))
- rb = conv(rb, elemType)
- rb = conv(rb, convType)
+ rb = typecheck.Conv(rb, elemType)
+ rb = typecheck.Conv(rb, convType)
rb = ir.NewBinaryExpr(base.Pos, ir.OLSH, rb, ir.NewInt(8*t.Elem().Width*offset))
cmprw = ir.NewBinaryExpr(base.Pos, ir.OOR, cmprw, rb)
}
@@ -3465,9 +3402,9 @@ func walkcompare(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
expr = ir.NewBool(n.Op() == ir.OEQ)
// We still need to use cmpl and cmpr, in case they contain
// an expression which might panic. See issue 23837.
- t := temp(cmpl.Type())
- a1 := typecheck(ir.NewAssignStmt(base.Pos, t, cmpl), ctxStmt)
- a2 := typecheck(ir.NewAssignStmt(base.Pos, t, cmpr), ctxStmt)
+ t := typecheck.Temp(cmpl.Type())
+ a1 := typecheck.Stmt(ir.NewAssignStmt(base.Pos, t, cmpl))
+ a2 := typecheck.Stmt(ir.NewAssignStmt(base.Pos, t, cmpr))
init.Append(a1, a2)
}
return finishcompare(n, expr, init)
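The shift/or chains built here exist so that SSA can merge the per-element byte loads into a single wide load, as the comment above notes. A runnable check that the combined uint32 words compare the same way as the arrays themselves (eqWide is an illustrative name):

    package main

    import "fmt"

    func eqWide(a, b [4]byte) bool {
        // uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 | uint32(s[3])<<24
        aw := uint32(a[0]) | uint32(a[1])<<8 | uint32(a[2])<<16 | uint32(a[3])<<24
        bw := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24
        return aw == bw
    }

    func main() {
        x := [4]byte{1, 2, 3, 4}
        y := [4]byte{1, 2, 3, 4}
        z := [4]byte{1, 2, 3, 5}
        fmt.Println(eqWide(x, y), x == y) // true true
        fmt.Println(eqWide(x, z), x == z) // false false
    }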
@@ -3479,7 +3416,7 @@ func tracecmpArg(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
n = copyexpr(n, n.Type(), init)
}
- return conv(n, t)
+ return typecheck.Conv(n, t)
}
func walkcompareInterface(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
@@ -3573,13 +3510,13 @@ func walkcompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
convType = types.Types[types.TUINT16]
step = 2
}
- ncsubstr := conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(int64(i))), convType)
+ ncsubstr := typecheck.Conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(int64(i))), convType)
csubstr := int64(s[i])
// Calculate large constant from bytes as sequence of shifts and ors.
// Like this: uint32(s[0]) | uint32(s[1])<<8 | uint32(s[2])<<16 ...
// ssa will combine this into a single large load.
for offset := 1; offset < step; offset++ {
- b := conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(int64(i+offset))), convType)
+ b := typecheck.Conv(ir.NewIndexExpr(base.Pos, ncs, ir.NewInt(int64(i+offset))), convType)
b = ir.NewBinaryExpr(base.Pos, ir.OLSH, b, ir.NewInt(int64(8*offset)))
ncsubstr = ir.NewBinaryExpr(base.Pos, ir.OOR, ncsubstr, b)
csubstr |= int64(s[i+offset]) << uint8(8*offset)
@@ -3612,7 +3549,7 @@ func walkcompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
}
} else {
// sys_cmpstring(s1, s2) :: 0
- r = mkcall("cmpstring", types.Types[types.TINT], init, conv(n.X, types.Types[types.TSTRING]), conv(n.Y, types.Types[types.TSTRING]))
+ r = mkcall("cmpstring", types.Types[types.TINT], init, typecheck.Conv(n.X, types.Types[types.TSTRING]), typecheck.Conv(n.Y, types.Types[types.TSTRING]))
r = ir.NewBinaryExpr(base.Pos, n.Op(), r, ir.NewInt(0))
}
@@ -3622,8 +3559,8 @@ func walkcompareString(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
// The result of finishcompare MUST be assigned back to n, e.g.
// n.Left = finishcompare(n.Left, x, r, init)
func finishcompare(n *ir.BinaryExpr, r ir.Node, init *ir.Nodes) ir.Node {
- r = typecheck(r, ctxExpr)
- r = conv(r, n.Type())
+ r = typecheck.Expr(r)
+ r = typecheck.Conv(r, n.Type())
r = walkexpr(r, init)
return r
}
@@ -3926,7 +3863,7 @@ func wrapCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
origArgs := make([]ir.Node, len(n.Args))
var funcArgs []*ir.Field
for i, arg := range n.Args {
- s := lookupN("a", i)
+ s := typecheck.LookupNum("a", i)
if !isBuiltinCall && arg.Op() == ir.OCONVNOP && arg.Type().IsUintptr() && arg.(*ir.ConvExpr).X.Type().IsUnsafePtr() {
origArgs[i] = arg
arg = arg.(*ir.ConvExpr).X
@@ -3937,8 +3874,8 @@ func wrapCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
t := ir.NewFuncType(base.Pos, nil, funcArgs, nil)
wrapCall_prgen++
- sym := lookupN("wrap·", wrapCall_prgen)
- fn := dclfunc(sym, t)
+ sym := typecheck.LookupNum("wrap·", wrapCall_prgen)
+ fn := typecheck.DeclFunc(sym, t)
args := ir.ParamNames(t.Type())
for i, origArg := range origArgs {
@@ -3954,32 +3891,14 @@ func wrapCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
}
fn.Body = []ir.Node{call}
- funcbody()
+ typecheck.FinishFuncBody()
- typecheckFunc(fn)
- typecheckslice(fn.Body, ctxStmt)
- Target.Decls = append(Target.Decls, fn)
+ typecheck.Func(fn)
+ typecheck.Stmts(fn.Body)
+ typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
call = ir.NewCallExpr(base.Pos, ir.OCALL, fn.Nname, n.Args)
- return walkexpr(typecheck(call, ctxStmt), init)
-}
-
-// substArgTypes substitutes the given list of types for
-// successive occurrences of the "any" placeholder in the
-// type syntax expression n.Type.
-// The result of substArgTypes MUST be assigned back to old, e.g.
-// n.Left = substArgTypes(n.Left, t1, t2)
-func substArgTypes(old *ir.Name, types_ ...*types.Type) *ir.Name {
- n := old.CloneName()
-
- for _, t := range types_ {
- types.CalcSize(t)
- }
- n.SetType(types.SubstAny(n.Type(), &types_))
- if len(types_) > 0 {
- base.Fatalf("substArgTypes: too many argument types")
- }
- return n
+ return walkexpr(typecheck.Stmt(call), init)
}
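wrapCall synthesizes a small function (named via typecheck.LookupNum("wrap·", ...) and declared with typecheck.DeclFunc) whose parameters a0, a1, ... receive the call's arguments, so that calls which cannot be issued directly (builtins in go and defer statements, for instance) go through an ordinary function. Roughly, and written by hand only for illustration:

    package main

    func main() {
        x, y := 1, 2
        // User code: defer println(x, y)
        // Hand-written stand-in for the compiler's wrap·N function:
        wrap := func(a0, a1 int) { println(a0, a1) }
        defer wrap(x, y) // arguments are evaluated here; the call runs later
        x, y = 0, 0      // does not affect the deferred output: prints 1 2
    }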
// canMergeLoads reports whether the backend optimization passes for
@@ -4025,7 +3944,7 @@ func walkCheckPtrAlignment(n *ir.ConvExpr, init *ir.Nodes, count ir.Node) ir.Nod
}
n.X = cheapexpr(n.X, init)
- init.Append(mkcall("checkptrAlignment", nil, init, convnop(n.X, types.Types[types.TUNSAFEPTR]), typename(elem), conv(count, types.Types[types.TUINTPTR])))
+ init.Append(mkcall("checkptrAlignment", nil, init, typecheck.ConvNop(n.X, types.Types[types.TUNSAFEPTR]), typename(elem), typecheck.Conv(count, types.Types[types.TUINTPTR])))
return n
}
@@ -4077,7 +3996,7 @@ func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
n := n.(*ir.ConvExpr)
if n.X.Type().IsUnsafePtr() {
n.X = cheapexpr(n.X, init)
- originals = append(originals, convnop(n.X, types.Types[types.TUNSAFEPTR]))
+ originals = append(originals, typecheck.ConvNop(n.X, types.Types[types.TUNSAFEPTR]))
}
}
}
@@ -4085,10 +4004,10 @@ func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
cheap := cheapexpr(n, init)
- slice := mkdotargslice(types.NewSlice(types.Types[types.TUNSAFEPTR]), originals)
+ slice := typecheck.MakeDotArgs(types.NewSlice(types.Types[types.TUNSAFEPTR]), originals)
slice.SetEsc(ir.EscNone)
- init.Append(mkcall("checkptrArithmetic", nil, init, convnop(cheap, types.Types[types.TUNSAFEPTR]), slice))
+ init.Append(mkcall("checkptrArithmetic", nil, init, typecheck.ConvNop(cheap, types.Types[types.TUNSAFEPTR]), slice))
// TODO(khr): Mark backing store of slice as dead. This will allow us to reuse
// the backing store for multiple calls to checkptrArithmetic.
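walkCheckPtrArithmetic instruments unsafe.Pointer arithmetic: the derived pointer and a slice of the original pointers it was computed from are handed to checkptrArithmetic, which verifies that the result still points into one of the original allocations. The kind of user code involved looks like the sketch below (run with checkptr instrumentation enabled, e.g. via -d=checkptr or the race detector):

    package main

    import (
        "fmt"
        "unsafe"
    )

    func main() {
        xs := [4]int32{10, 20, 30, 40}
        p := unsafe.Pointer(&xs[0])
        // Derived pointer: &xs[0] + 2*sizeof(int32). Under checkptr the
        // compiler passes q together with the originals it was computed
        // from to checkptrArithmetic.
        q := unsafe.Pointer(uintptr(p) + 2*unsafe.Sizeof(xs[0]))
        fmt.Println(*(*int32)(q)) // 30
    }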
@@ -4098,7 +4017,7 @@ func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
// appendWalkStmt typechecks and walks stmt and then appends it to init.
func appendWalkStmt(init *ir.Nodes, stmt ir.Node) {
op := stmt.Op()
- n := typecheck(stmt, ctxStmt)
+ n := typecheck.Stmt(stmt)
if op == ir.OAS || op == ir.OAS2 {
// If the assignment has side effects, walkexpr will append them
// directly to init for us, while walkstmt will wrap it in an OBLOCK.