aboutsummaryrefslogtreecommitdiff
path: root/src/cmd/compile/internal/typecheck/iexport.go
diff options
context:
space:
mode:
author    Russ Cox <rsc@golang.org>    2020-12-23 00:41:49 -0500
committer Russ Cox <rsc@golang.org>    2020-12-23 06:38:26 +0000
commitb9693d7627089204e6c2448f543c3512d86dae70 (patch)
tree0f869f5abb58568525f47d330e93ef36e1467bf2 /src/cmd/compile/internal/typecheck/iexport.go
parentdac0de3748cc816352da56f516506f80c33db4a5 (diff)
downloadgo-b9693d7627089204e6c2448f543c3512d86dae70.tar.gz
go-b9693d7627089204e6c2448f543c3512d86dae70.zip
[dev.regabi] cmd/compile: split out package typecheck [generated]
This commit splits the typechecking logic into its own package, the first of a sequence of CLs to break package gc into more manageable units. [git-generate] cd src/cmd/compile/internal/gc rf ' # The binary import/export has to be part of typechecking, # because we load inlined function bodies lazily, but "exporter" # should not be. Move that out of bexport.go. mv exporter exporter.markObject exporter.markType export.go # Use the typechecking helpers, so that the calls left behind # in package gc do not need access to ctxExpr etc. ex { import "cmd/compile/internal/ir" # TODO(rsc): Should not be necessary. avoid TypecheckExpr avoid TypecheckStmt avoid TypecheckExprs avoid TypecheckStmts avoid TypecheckAssignExpr avoid TypecheckCallee var n ir.Node var ns []ir.Node typecheck(n, ctxExpr) -> TypecheckExpr(n) typecheck(n, ctxStmt) -> TypecheckStmt(n) typecheckslice(ns, ctxExpr) -> TypecheckExprs(ns) typecheckslice(ns, ctxStmt) -> TypecheckStmts(ns) typecheck(n, ctxExpr|ctxAssign) -> TypecheckAssignExpr(n) typecheck(n, ctxExpr|ctxCallee) -> TypecheckCallee(n) } # Move some typechecking API to typecheck. mv syslook LookupRuntime mv substArgTypes SubstArgTypes mv LookupRuntime SubstArgTypes syms.go mv conv Conv mv convnop ConvNop mv Conv ConvNop typecheck.go mv colasdefn AssignDefn mv colasname assignableName mv Target target.go mv initname autoexport exportsym dcl.go mv exportsym Export # Export API to be called from outside typecheck. # The ones with "Typecheck" prefixes will be renamed later to drop the prefix. 
mv adddot AddImplicitDots mv assignconv AssignConv mv expandmeth CalcMethods mv capturevarscomplete CaptureVarsComplete mv checkMapKeys CheckMapKeys mv checkreturn CheckReturn mv dclcontext DeclContext mv dclfunc DeclFunc mv declare Declare mv dotImportRefs DotImportRefs mv declImporter DeclImporter mv variter DeclVars mv defaultlit DefaultLit mv evalConst EvalConst mv expandInline ImportBody mv finishUniverse declareUniverse mv funcbody FinishFuncBody mv funchdr StartFuncBody mv indexconst IndexConst mv initTodo InitTodoFunc mv lookup Lookup mv resolve Resolve mv lookupN LookupNum mv nodAddr NodAddr mv nodAddrAt NodAddrAt mv nodnil NodNil mv origBoolConst OrigBool mv origConst OrigConst mv origIntConst OrigInt mv redeclare Redeclared mv tostruct NewStructType mv functype NewFuncType mv methodfunc NewMethodType mv structargs NewFuncParams mv temp Temp mv tempAt TempAt mv typecheckok TypecheckAllowed mv typecheck _typecheck # make room for typecheck pkg mv typecheckinl TypecheckImportedBody mv typecheckFunc TypecheckFunc mv iimport ReadImports mv iexport WriteExports mv sysfunc LookupRuntimeFunc mv sysvar LookupRuntimeVar # Move function constructors to typecheck. 
mv mkdotargslice MakeDotArgs mv fixVariadicCall FixVariadicCall mv closureType ClosureType mv partialCallType PartialCallType mv capturevars CaptureVars mv MakeDotArgs FixVariadicCall ClosureType PartialCallType CaptureVars typecheckclosure func.go mv autolabel AutoLabel mv AutoLabel syms.go mv Dlist dlist mv Symlink symlink mv \ AssignDefn assignableName \ AssignConv \ CaptureVarsComplete \ DeclContext \ DeclFunc \ DeclImporter \ DeclVars \ Declare \ DotImportRefs \ Export \ InitTodoFunc \ Lookup \ LookupNum \ LookupRuntimeFunc \ LookupRuntimeVar \ NewFuncParams \ NewName \ NodAddr \ NodAddrAt \ NodNil \ Redeclared \ StartFuncBody \ FinishFuncBody \ TypecheckImportedBody \ AddImplicitDots \ CalcMethods \ CheckFuncStack \ NewFuncType \ NewMethodType \ NewStructType \ TypecheckAllowed \ Temp \ TempAt \ adddot1 \ dotlist \ addmethod \ assignconvfn \ assignop \ autotmpname \ autoexport \ bexport.go \ checkdupfields \ checkembeddedtype \ closurename \ convertop \ declare_typegen \ decldepth \ dlist \ dotpath \ expand0 \ expand1 \ expandDecl \ fakeRecvField \ fnpkg \ funcStack \ funcStackEnt \ funcarg \ funcarg2 \ funcargs \ funcargs2 \ globClosgen \ ifacelookdot \ implements \ importalias \ importconst \ importfunc \ importobj \ importsym \ importtype \ importvar \ inimport \ initname \ isptrto \ loadsys \ lookdot0 \ lookdot1 \ makepartialcall \ okfor \ okforlen \ operandType \ slist \ symlink \ tointerface \ typeSet \ typeSet.add \ typeSetEntry \ typecheckExprSwitch \ typecheckTypeSwitch \ typecheckpartialcall \ typecheckrange \ typecheckrangeExpr \ typecheckselect \ typecheckswitch \ vargen \ builtin.go \ builtin_test.go \ const.go \ func.go \ iexport.go \ iimport.go \ mapfile_mmap.go \ syms.go \ target.go \ typecheck.go \ unsafe.go \ universe.go \ cmd/compile/internal/typecheck ' rm gen.go types.go types_acc.go sed -i '' 's/package gc/package typecheck/' mapfile_read.go mkbuiltin.go mv mapfile_read.go ../typecheck # not part of default build mv mkbuiltin.go 
../typecheck # package main helper mv builtin ../typecheck cd ../typecheck mv dcl.go dcl1.go mv typecheck.go typecheck1.go mv universe.go universe1.go rf ' # Sweep some small files into larger ones. # "mv sym... file1.go file.go" (after the mv file1.go file.go above) # lets us insert sym... at the top of file.go. mv okfor okforeq universe1.go universe.go mv DeclContext vargen dcl1.go Temp TempAt autotmpname NewMethodType dcl.go mv InitTodoFunc inimport decldepth TypecheckAllowed typecheck1.go typecheck.go mv inl.go closure.go func.go mv range.go select.go swt.go stmt.go mv Lookup loadsys LookupRuntimeFunc LookupRuntimeVar syms.go mv unsafe.go const.go mv TypecheckAssignExpr AssignExpr mv TypecheckExpr Expr mv TypecheckStmt Stmt mv TypecheckExprs Exprs mv TypecheckStmts Stmts mv TypecheckCall Call mv TypecheckCallee Callee mv _typecheck check mv TypecheckFunc Func mv TypecheckFuncBody FuncBody mv TypecheckImports AllImportedBodies mv TypecheckImportedBody ImportedBody mv TypecheckInit Init mv TypecheckPackage Package ' rm gen.go go.go init.go main.go reflect.go Change-Id: Iea6a7aaf6407d690670ec58aeb36cc0b280f80b0 Reviewed-on: https://go-review.googlesource.com/c/go/+/279236 Trust: Russ Cox <rsc@golang.org> Run-TryBot: Russ Cox <rsc@golang.org> TryBot-Result: Go Bot <gobot@golang.org> Reviewed-by: Matthew Dempsky <mdempsky@google.com>
Diffstat (limited to 'src/cmd/compile/internal/typecheck/iexport.go')
-rw-r--r--  src/cmd/compile/internal/typecheck/iexport.go  1614
1 files changed, 1614 insertions, 0 deletions
diff --git a/src/cmd/compile/internal/typecheck/iexport.go b/src/cmd/compile/internal/typecheck/iexport.go
new file mode 100644
index 0000000000..4ddee01b5a
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/iexport.go
@@ -0,0 +1,1614 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Indexed package export.
+//
+// The indexed export data format is an evolution of the previous
+// binary export data format. Its chief contribution is introducing an
+// index table, which allows efficient random access of individual
+// declarations and inline function bodies. In turn, this allows
+// avoiding unnecessary work for compilation units that import large
+// packages.
+//
+//
+// The top-level data format is structured as:
+//
+// Header struct {
+// Tag byte // 'i'
+// Version uvarint
+// StringSize uvarint
+// DataSize uvarint
+// }
+//
+// Strings [StringSize]byte
+// Data [DataSize]byte
+//
+// MainIndex []struct{
+// PkgPath stringOff
+// PkgName stringOff
+// PkgHeight uvarint
+//
+// Decls []struct{
+// Name stringOff
+// Offset declOff
+// }
+// }
+//
+// Fingerprint [8]byte
+//
+// uvarint means a uint64 written out using uvarint encoding.
+//
+// []T means a uvarint followed by that many T objects. In other
+// words:
+//
+// Len uvarint
+// Elems [Len]T
+//
+// stringOff means a uvarint that indicates an offset within the
+// Strings section. At that offset is another uvarint, followed by
+// that many bytes, which form the string value.
+//
+// declOff means a uvarint that indicates an offset within the Data
+// section where the associated declaration can be found.
+//
+//
+// There are five kinds of declarations, distinguished by their first
+// byte:
+//
+// type Var struct {
+// Tag byte // 'V'
+// Pos Pos
+// Type typeOff
+// }
+//
+// type Func struct {
+// Tag byte // 'F'
+// Pos Pos
+// Signature Signature
+// }
+//
+// type Const struct {
+// Tag byte // 'C'
+// Pos Pos
+// Value Value
+// }
+//
+// type Type struct {
+// Tag byte // 'T'
+// Pos Pos
+// Underlying typeOff
+//
+// Methods []struct{ // omitted if Underlying is an interface type
+// Pos Pos
+// Name stringOff
+// Recv Param
+// Signature Signature
+// }
+// }
+//
+// type Alias struct {
+// Tag byte // 'A'
+// Pos Pos
+// Type typeOff
+// }
+//
+//
+// typeOff means a uvarint that either indicates a predeclared type,
+// or an offset into the Data section. If the uvarint is less than
+// predeclReserved, then it indicates the index into the predeclared
+// types list (see predeclared in bexport.go for order). Otherwise,
+// subtracting predeclReserved yields the offset of a type descriptor.
+//
+// Value means a type and type-specific value. See
+// (*exportWriter).value for details.
+//
+//
+// There are nine kinds of type descriptors, distinguished by an itag:
+//
+// type DefinedType struct {
+// Tag itag // definedType
+// Name stringOff
+// PkgPath stringOff
+// }
+//
+// type PointerType struct {
+// Tag itag // pointerType
+// Elem typeOff
+// }
+//
+// type SliceType struct {
+// Tag itag // sliceType
+// Elem typeOff
+// }
+//
+// type ArrayType struct {
+// Tag itag // arrayType
+// Len uint64
+// Elem typeOff
+// }
+//
+// type ChanType struct {
+// Tag itag // chanType
+// Dir uint64 // 1 RecvOnly; 2 SendOnly; 3 SendRecv
+// Elem typeOff
+// }
+//
+// type MapType struct {
+// Tag itag // mapType
+// Key typeOff
+// Elem typeOff
+// }
+//
+// type FuncType struct {
+// Tag itag // signatureType
+// PkgPath stringOff
+// Signature Signature
+// }
+//
+// type StructType struct {
+// Tag itag // structType
+// PkgPath stringOff
+// Fields []struct {
+// Pos Pos
+// Name stringOff
+// Type typeOff
+// Embedded bool
+// Note stringOff
+// }
+// }
+//
+// type InterfaceType struct {
+// Tag itag // interfaceType
+// PkgPath stringOff
+// Embeddeds []struct {
+// Pos Pos
+// Type typeOff
+// }
+// Methods []struct {
+// Pos Pos
+// Name stringOff
+// Signature Signature
+// }
+// }
+//
+//
+// type Signature struct {
+// Params []Param
+// Results []Param
+// Variadic bool // omitted if Results is empty
+// }
+//
+// type Param struct {
+// Pos Pos
+// Name stringOff
+// Type typOff
+// }
+//
+//
+// Pos encodes a file:line:column triple, incorporating a simple delta
+// encoding scheme within a data object. See exportWriter.pos for
+// details.
+//
+//
+// Compiler-specific details.
+//
+// cmd/compile writes out a second index for inline bodies and also
+// appends additional compiler-specific details after declarations.
+// Third-party tools are not expected to depend on these details and
+// they're expected to change much more rapidly, so they're omitted
+// here. See exportWriter's varExt/funcExt/etc methods for details.
+
+package typecheck
+
+import (
+ "bufio"
+ "bytes"
+ "crypto/md5"
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "io"
+ "math/big"
+ "sort"
+ "strings"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+ "cmd/internal/goobj"
+ "cmd/internal/src"
+)
+
+// Current indexed export format version. Increase with each format change.
+// 1: added column details to Pos
+// 0: Go1.11 encoding
+const iexportVersion = 1
+
+// predeclReserved is the number of type offsets reserved for types
+// implicitly declared in the universe block.
+const predeclReserved = 32
+
+// An itag distinguishes the kind of type that was written into the
+// indexed export format.
+type itag uint64
+
+const (
+ // Types
+ definedType itag = iota
+ pointerType
+ sliceType
+ arrayType
+ chanType
+ mapType
+ signatureType
+ structType
+ interfaceType
+)
+
+// WriteExports writes the indexed export data for the current package
+// to out: header, string section, data section (declarations, inline
+// bodies, type descriptors, and the two indices), followed by the
+// package fingerprint used by the linker.
+func WriteExports(out *bufio.Writer) {
+ p := iexporter{
+ allPkgs: map[*types.Pkg]bool{},
+ stringIndex: map[string]uint64{},
+ declIndex: map[*types.Sym]uint64{},
+ inlineIndex: map[*types.Sym]uint64{},
+ typIndex: map[*types.Type]uint64{},
+ }
+
+ // Seed the type index with the universe's predeclared types so
+ // they encode as small reserved typeOffs (< predeclReserved).
+ for i, pt := range predeclared() {
+ p.typIndex[pt] = uint64(i)
+ }
+ if len(p.typIndex) > predeclReserved {
+ base.Fatalf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved)
+ }
+
+ // Initialize work queue with exported declarations.
+ for _, n := range Target.Exports {
+ p.pushDecl(n)
+ }
+
+ // Loop until no more work. We use a queue because while
+ // writing out inline bodies, we may discover additional
+ // declarations that are needed.
+ for !p.declTodo.Empty() {
+ p.doDecl(p.declTodo.PopLeft())
+ }
+
+ // Append indices to data0 section.
+ dataLen := uint64(p.data0.Len())
+ w := p.newWriter()
+ w.writeIndex(p.declIndex, true)
+ w.writeIndex(p.inlineIndex, false)
+ w.flush()
+
+ if *base.Flag.LowerV {
+ fmt.Printf("export: hdr strings %v, data %v, index %v\n", p.strings.Len(), dataLen, p.data0.Len())
+ }
+
+ // Assemble header.
+ var hdr intWriter
+ hdr.WriteByte('i')
+ hdr.uint64(iexportVersion)
+ hdr.uint64(uint64(p.strings.Len()))
+ hdr.uint64(dataLen)
+
+ // Flush output. The MD5 of everything written becomes the
+ // package fingerprint below.
+ h := md5.New()
+ wr := io.MultiWriter(out, h)
+ io.Copy(wr, &hdr)
+ io.Copy(wr, &p.strings)
+ io.Copy(wr, &p.data0)
+
+ // Add fingerprint (used by linker object file).
+ // Attach this to the end, so tools (e.g. gcimporter) don't care.
+ copy(base.Ctxt.Fingerprint[:], h.Sum(nil)[:])
+ out.Write(base.Ctxt.Fingerprint[:])
+}
+
+// writeIndex writes out a symbol index. mainIndex indicates whether
+// we're writing out the main index, which is also read by
+// non-compiler tools and includes a complete package description
+// (i.e., name and height).
+//
+// Packages and the symbols within each package are written in sorted
+// order so the output is deterministic.
+func (w *exportWriter) writeIndex(index map[*types.Sym]uint64, mainIndex bool) {
+ // Build a map from packages to symbols from that package.
+ pkgSyms := map[*types.Pkg][]*types.Sym{}
+
+ // For the main index, make sure to include every package that
+ // we reference, even if we're not exporting (or reexporting)
+ // any symbols from it.
+ if mainIndex {
+ pkgSyms[types.LocalPkg] = nil
+ for pkg := range w.p.allPkgs {
+ pkgSyms[pkg] = nil
+ }
+ }
+
+ // Group symbols by package.
+ for sym := range index {
+ pkgSyms[sym.Pkg] = append(pkgSyms[sym.Pkg], sym)
+ }
+
+ // Sort packages by path.
+ var pkgs []*types.Pkg
+ for pkg := range pkgSyms {
+ pkgs = append(pkgs, pkg)
+ }
+ sort.Slice(pkgs, func(i, j int) bool {
+ return pkgs[i].Path < pkgs[j].Path
+ })
+
+ w.uint64(uint64(len(pkgs)))
+ for _, pkg := range pkgs {
+ w.string(pkg.Path)
+ if mainIndex {
+ w.string(pkg.Name)
+ w.uint64(uint64(pkg.Height))
+ }
+
+ // Sort symbols within a package by name.
+ syms := pkgSyms[pkg]
+ sort.Slice(syms, func(i, j int) bool {
+ return syms[i].Name < syms[j].Name
+ })
+
+ w.uint64(uint64(len(syms)))
+ for _, sym := range syms {
+ w.string(sym.Name)
+ w.uint64(index[sym])
+ }
+ }
+}
+
+// iexporter holds the state for a single export: the string and data
+// sections being accumulated, and the index maps recording where each
+// declaration, inline body, and type descriptor lives within them.
+type iexporter struct {
+ // allPkgs tracks all packages that have been referenced by
+ // the export data, so we can ensure to include them in the
+ // main index.
+ allPkgs map[*types.Pkg]bool
+
+ // declTodo queues named declarations still to be written.
+ declTodo ir.NameQueue
+
+ // strings is the string section; stringIndex maps each string
+ // already interned to its offset there.
+ strings intWriter
+ stringIndex map[string]uint64
+
+ // data0 is the data section; the maps below record offsets of
+ // declarations, inline bodies, and type descriptors within it.
+ data0 intWriter
+ declIndex map[*types.Sym]uint64
+ inlineIndex map[*types.Sym]uint64
+ typIndex map[*types.Type]uint64
+}
+
+// stringOff returns the offset of s within the string section.
+// If not already present, it's added to the end.
+func (p *iexporter) stringOff(s string) uint64 {
+ off, ok := p.stringIndex[s]
+ if !ok {
+ off = uint64(p.strings.Len())
+ p.stringIndex[s] = off
+
+ if *base.Flag.LowerV {
+ fmt.Printf("export: str %v %.40q\n", off, s)
+ }
+
+ // Strings are stored length-prefixed.
+ p.strings.uint64(uint64(len(s)))
+ p.strings.WriteString(s)
+ }
+ return off
+}
+
+// pushDecl adds n to the declaration work queue, if not already present.
+func (p *iexporter) pushDecl(n *ir.Name) {
+ // Sanity check: n must be its symbol's definition, except for
+ // type symbols.
+ if n.Sym() == nil || n.Sym().Def != n && n.Op() != ir.OTYPE {
+ base.Fatalf("weird Sym: %v, %v", n, n.Sym())
+ }
+
+ // Don't export predeclared declarations.
+ if n.Sym().Pkg == types.BuiltinPkg || n.Sym().Pkg == ir.Pkgs.Unsafe {
+ return
+ }
+
+ if _, ok := p.declIndex[n.Sym()]; ok {
+ return
+ }
+
+ // The sentinel offset is overwritten with the real offset once
+ // doDecl finishes the chunk.
+ p.declIndex[n.Sym()] = ^uint64(0) // mark n present in work queue
+ p.declTodo.PushRight(n)
+}
+
+// exportWriter handles writing out individual data section chunks.
+type exportWriter struct {
+ p *iexporter // parent exporter (shared string/type/decl indices)
+
+ data intWriter // current chunk; flush appends it to p.data0
+ currPkg *types.Pkg // package context for selector checks
+ prevFile string // previous position, used by pos for delta encoding
+ prevLine int64
+ prevColumn int64
+}
+
+// doDecl writes the export data chunk for declaration n — one of the
+// five declaration kinds ('V', 'F', 'C', 'T', 'A') described in the
+// format comment above — and records its offset in the decl index.
+func (p *iexporter) doDecl(n *ir.Name) {
+ w := p.newWriter()
+ w.setPkg(n.Sym().Pkg, false)
+
+ switch n.Op() {
+ case ir.ONAME:
+ switch n.Class_ {
+ case ir.PEXTERN:
+ // Variable.
+ w.tag('V')
+ w.pos(n.Pos())
+ w.typ(n.Type())
+ w.varExt(n)
+
+ case ir.PFUNC:
+ if ir.IsMethod(n) {
+ base.Fatalf("unexpected method: %v", n)
+ }
+
+ // Function.
+ w.tag('F')
+ w.pos(n.Pos())
+ w.signature(n.Type())
+ w.funcExt(n)
+
+ default:
+ base.Fatalf("unexpected class: %v, %v", n, n.Class_)
+ }
+
+ case ir.OLITERAL:
+ // Constant.
+ // TODO(mdempsky): Do we still need this typecheck? If so, why?
+ n = Expr(n).(*ir.Name)
+ w.tag('C')
+ w.pos(n.Pos())
+ w.value(n.Type(), n.Val())
+
+ case ir.OTYPE:
+ if types.IsDotAlias(n.Sym()) {
+ // Alias.
+ w.tag('A')
+ w.pos(n.Pos())
+ w.typ(n.Type())
+ break
+ }
+
+ // Defined type.
+ w.tag('T')
+ w.pos(n.Pos())
+
+ underlying := n.Type().Underlying()
+ if underlying == types.ErrorType.Underlying() {
+ // For "type T error", use error as the
+ // underlying type instead of error's own
+ // underlying anonymous interface. This
+ // ensures consistency with how importers may
+ // declare error (e.g., go/types uses nil Pkg
+ // for predeclared objects).
+ underlying = types.ErrorType
+ }
+ w.typ(underlying)
+
+ t := n.Type()
+ if t.IsInterface() {
+ // Per the format comment, the Methods list is
+ // omitted when the underlying type is an interface.
+ w.typeExt(t)
+ break
+ }
+
+ ms := t.Methods()
+ w.uint64(uint64(ms.Len()))
+ for _, m := range ms.Slice() {
+ w.pos(m.Pos)
+ w.selector(m.Sym)
+ w.param(m.Type.Recv())
+ w.signature(m.Type)
+ }
+
+ // Compiler-specific extensions follow the method list.
+ w.typeExt(t)
+ for _, m := range ms.Slice() {
+ w.methExt(m)
+ }
+
+ default:
+ base.Fatalf("unexpected node: %v", n)
+ }
+
+ w.finish("dcl", p.declIndex, n.Sym())
+}
+
+// tag writes the declaration kind byte ('V', 'F', 'C', 'T', or 'A').
+func (w *exportWriter) tag(tag byte) {
+ w.data.WriteByte(tag)
+}
+
+// finish flushes the current chunk into the data section and records
+// its offset for sym in the given index, logging under -v.
+func (w *exportWriter) finish(what string, index map[*types.Sym]uint64, sym *types.Sym) {
+ off := w.flush()
+ if *base.Flag.LowerV {
+ fmt.Printf("export: %v %v %v\n", what, off, sym)
+ }
+ index[sym] = off
+}
+
+// doInline writes the inline body of function f as a statement list
+// and records its offset in the inline index.
+func (p *iexporter) doInline(f *ir.Name) {
+ w := p.newWriter()
+ w.setPkg(fnpkg(f), false)
+
+ w.stmtList(ir.Nodes(f.Func.Inl.Body))
+
+ w.finish("inl", p.inlineIndex, f.Sym())
+}
+
+// pos writes the source position pos as a delta-encoded
+// file:line:column triple (see the scheme described below).
+func (w *exportWriter) pos(pos src.XPos) {
+ p := base.Ctxt.PosTable.Pos(pos)
+ file := p.Base().AbsFilename()
+ line := int64(p.RelLine())
+ column := int64(p.RelCol())
+
+ // Encode position relative to the last position: column
+ // delta, then line delta, then file name. We reserve the
+ // bottom bit of the column and line deltas to encode whether
+ // the remaining fields are present.
+ //
+ // Note: Because data objects may be read out of order (or not
+ // at all), we can only apply delta encoding within a single
+ // object. This is handled implicitly by tracking prevFile,
+ // prevLine, and prevColumn as fields of exportWriter.
+
+ deltaColumn := (column - w.prevColumn) << 1
+ deltaLine := (line - w.prevLine) << 1
+
+ if file != w.prevFile {
+ deltaLine |= 1
+ }
+ if deltaLine != 0 {
+ deltaColumn |= 1
+ }
+
+ w.int64(deltaColumn)
+ if deltaColumn&1 != 0 {
+ w.int64(deltaLine)
+ if deltaLine&1 != 0 {
+ w.string(file)
+ }
+ }
+
+ w.prevFile = file
+ w.prevLine = line
+ w.prevColumn = column
+}
+
+// pkg writes a reference to pkg (its path), and records pkg so the
+// main index will include it.
+func (w *exportWriter) pkg(pkg *types.Pkg) {
+ // Ensure any referenced packages are declared in the main index.
+ w.p.allPkgs[pkg] = true
+
+ w.string(pkg.Path)
+}
+
+// qualifiedIdent writes n's name and package, first scheduling n
+// itself for export so the reference can be resolved.
+func (w *exportWriter) qualifiedIdent(n ir.Node) {
+ // Ensure any referenced declarations are written out too.
+ w.p.pushDecl(n.Name())
+
+ s := n.Sym()
+ w.string(s.Name)
+ w.pkg(s.Pkg)
+}
+
+// selector writes the bare (unqualified) name of field/method symbol
+// s, checking that it belongs to the current package context.
+func (w *exportWriter) selector(s *types.Sym) {
+ if w.currPkg == nil {
+ base.Fatalf("missing currPkg")
+ }
+
+ // Method selectors are rewritten into method symbols (of the
+ // form T.M) during typechecking, but we want to write out
+ // just the bare method name.
+ name := s.Name
+ if i := strings.LastIndex(name, "."); i >= 0 {
+ name = name[i+1:]
+ } else {
+ // Exported names live in the local package; unexported
+ // names must match the current package context.
+ pkg := w.currPkg
+ if types.IsExported(name) {
+ pkg = types.LocalPkg
+ }
+ if s.Pkg != pkg {
+ base.Fatalf("package mismatch in selector: %v in package %q, but want %q", s, s.Pkg.Path, pkg.Path)
+ }
+ }
+
+ w.string(name)
+}
+
+// typ writes a typeOff reference to t, emitting t's descriptor first
+// if it hasn't been written yet.
+func (w *exportWriter) typ(t *types.Type) {
+ w.data.uint64(w.p.typOff(t))
+}
+
+// newWriter returns a fresh exportWriter for writing one data chunk.
+func (p *iexporter) newWriter() *exportWriter {
+ return &exportWriter{p: p}
+}
+
+// flush appends the chunk's bytes to the data section and returns the
+// offset at which the chunk starts.
+func (w *exportWriter) flush() uint64 {
+ off := uint64(w.p.data0.Len())
+ io.Copy(&w.p.data0, &w.data)
+ return off
+}
+
+// typOff returns t's typeOff, writing out its type descriptor first
+// if needed. Predeclared types were pre-seeded into typIndex with
+// offsets below predeclReserved; all other offsets are shifted up by
+// predeclReserved to stay disjoint from them.
+func (p *iexporter) typOff(t *types.Type) uint64 {
+ off, ok := p.typIndex[t]
+ if !ok {
+ w := p.newWriter()
+ w.doTyp(t)
+ rawOff := w.flush()
+ if *base.Flag.LowerV {
+ fmt.Printf("export: typ %v %v\n", rawOff, t)
+ }
+ off = predeclReserved + rawOff
+ p.typIndex[t] = off
+ }
+ return off
+}
+
+// startType writes the itag that begins a type descriptor.
+func (w *exportWriter) startType(k itag) {
+ w.data.uint64(uint64(k))
+}
+
+// doTyp writes the type descriptor for t, one of the nine descriptor
+// kinds described in the format comment above. Named types are
+// written as definedType references; everything else is written
+// structurally.
+func (w *exportWriter) doTyp(t *types.Type) {
+ if t.Sym() != nil {
+ // Named type: write a reference to its declaration.
+ // Builtin/unsafe types should have been caught by the
+ // predeclared seeding in WriteExports.
+ if t.Sym().Pkg == types.BuiltinPkg || t.Sym().Pkg == ir.Pkgs.Unsafe {
+ base.Fatalf("builtin type missing from typIndex: %v", t)
+ }
+
+ w.startType(definedType)
+ w.qualifiedIdent(t.Obj().(*ir.Name))
+ return
+ }
+
+ switch t.Kind() {
+ case types.TPTR:
+ w.startType(pointerType)
+ w.typ(t.Elem())
+
+ case types.TSLICE:
+ w.startType(sliceType)
+ w.typ(t.Elem())
+
+ case types.TARRAY:
+ w.startType(arrayType)
+ w.uint64(uint64(t.NumElem()))
+ w.typ(t.Elem())
+
+ case types.TCHAN:
+ w.startType(chanType)
+ w.uint64(uint64(t.ChanDir()))
+ w.typ(t.Elem())
+
+ case types.TMAP:
+ w.startType(mapType)
+ w.typ(t.Key())
+ w.typ(t.Elem())
+
+ case types.TFUNC:
+ w.startType(signatureType)
+ w.setPkg(t.Pkg(), true)
+ w.signature(t)
+
+ case types.TSTRUCT:
+ w.startType(structType)
+ w.setPkg(t.Pkg(), true)
+
+ w.uint64(uint64(t.NumFields()))
+ for _, f := range t.FieldSlice() {
+ w.pos(f.Pos)
+ w.selector(f.Sym)
+ w.typ(f.Type)
+ w.bool(f.Embedded != 0)
+ w.string(f.Note)
+ }
+
+ case types.TINTER:
+ // Split the method set: entries without a Sym are
+ // embedded interfaces; the rest are explicit methods.
+ var embeddeds, methods []*types.Field
+ for _, m := range t.Methods().Slice() {
+ if m.Sym != nil {
+ methods = append(methods, m)
+ } else {
+ embeddeds = append(embeddeds, m)
+ }
+ }
+
+ w.startType(interfaceType)
+ w.setPkg(t.Pkg(), true)
+
+ w.uint64(uint64(len(embeddeds)))
+ for _, f := range embeddeds {
+ w.pos(f.Pos)
+ w.typ(f.Type)
+ }
+
+ w.uint64(uint64(len(methods)))
+ for _, f := range methods {
+ w.pos(f.Pos)
+ w.selector(f.Sym)
+ w.signature(f.Type)
+ }
+
+ default:
+ base.Fatalf("unexpected type: %v", t)
+ }
+}
+
+// setPkg makes pkg the current package context (used by selector);
+// if write is set, it also writes a reference to pkg.
+func (w *exportWriter) setPkg(pkg *types.Pkg, write bool) {
+ if pkg == types.NoPkg {
+ base.Fatalf("missing pkg")
+ }
+
+ if write {
+ w.pkg(pkg)
+ }
+
+ w.currPkg = pkg
+}
+
+// signature writes t's parameter and result lists, then the variadic
+// flag — omitted when there are no parameters, matching the format
+// comment above.
+func (w *exportWriter) signature(t *types.Type) {
+ w.paramList(t.Params().FieldSlice())
+ w.paramList(t.Results().FieldSlice())
+ if n := t.Params().NumFields(); n > 0 {
+ w.bool(t.Params().Field(n - 1).IsDDD())
+ }
+}
+
+// paramList writes a length-prefixed list of parameters.
+func (w *exportWriter) paramList(fs []*types.Field) {
+ w.uint64(uint64(len(fs)))
+ for _, f := range fs {
+ w.param(f)
+ }
+}
+
+// param writes a single parameter: position, original name, and type.
+func (w *exportWriter) param(f *types.Field) {
+ w.pos(f.Pos)
+ w.localIdent(types.OrigSym(f.Sym), 0)
+ w.typ(f.Type)
+}
+
+// constTypeOf returns the constant.Kind used to encode values of
+// type typ in the export data.
+func constTypeOf(typ *types.Type) constant.Kind {
+ switch typ {
+ case types.UntypedInt, types.UntypedRune:
+ return constant.Int
+ case types.UntypedFloat:
+ return constant.Float
+ case types.UntypedComplex:
+ return constant.Complex
+ }
+
+ switch typ.Kind() {
+ case types.TBOOL:
+ return constant.Bool
+ case types.TSTRING:
+ return constant.String
+ case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64,
+ types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR:
+ return constant.Int
+ case types.TFLOAT32, types.TFLOAT64:
+ return constant.Float
+ case types.TCOMPLEX64, types.TCOMPLEX128:
+ return constant.Complex
+ }
+
+ base.Fatalf("unexpected constant type: %v", typ)
+ return 0
+}
+
+// value writes a constant value: its type reference followed by a
+// type-specific encoding of v.
+func (w *exportWriter) value(typ *types.Type, v constant.Value) {
+ ir.AssertValidTypeForConst(typ, v)
+ w.typ(typ)
+
+ // Each type has only one admissible constant representation,
+ // so we could type switch directly on v.U here. However,
+ // switching on the type increases symmetry with import logic
+ // and provides a useful consistency check.
+
+ switch constTypeOf(typ) {
+ case constant.Bool:
+ w.bool(constant.BoolVal(v))
+ case constant.String:
+ w.string(constant.StringVal(v))
+ case constant.Int:
+ w.mpint(v, typ)
+ case constant.Float:
+ w.mpfloat(v, typ)
+ case constant.Complex:
+ // A complex constant is its real part followed by its
+ // imaginary part.
+ w.mpfloat(constant.Real(v), typ)
+ w.mpfloat(constant.Imag(v), typ)
+ }
+}
+
+// intSize reports whether typ is signed and the maximum number of
+// bytes needed to encode an integer value of that type (for floats
+// and complexes, the mantissa integer written by mpfloat).
+func intSize(typ *types.Type) (signed bool, maxBytes uint) {
+ if typ.IsUntyped() {
+ return true, ir.ConstPrec / 8
+ }
+
+ switch typ.Kind() {
+ case types.TFLOAT32, types.TCOMPLEX64:
+ return true, 3
+ case types.TFLOAT64, types.TCOMPLEX128:
+ return true, 7
+ }
+
+ signed = typ.IsSigned()
+ maxBytes = uint(typ.Size())
+
+ // The go/types API doesn't expose sizes to importers, so they
+ // don't know how big these types are.
+ switch typ.Kind() {
+ case types.TINT, types.TUINT, types.TUINTPTR:
+ maxBytes = 8
+ }
+
+ return
+}
+
+// mpint exports a multi-precision integer.
+//
+// For unsigned types, small values are written out as a single
+// byte. Larger values are written out as a length-prefixed big-endian
+// byte string, where the length prefix is encoded as its complement.
+// For example, bytes 0, 1, and 2 directly represent the integer
+// values 0, 1, and 2; while bytes 255, 254, and 253 indicate a 1-,
+// 2-, and 3-byte big-endian string follow.
+//
+// Encoding for signed types use the same general approach as for
+// unsigned types, except small values use zig-zag encoding and the
+// bottom bit of length prefix byte for large values is reserved as a
+// sign bit.
+//
+// The exact boundary between small and large encodings varies
+// according to the maximum number of bytes needed to encode a value
+// of type typ. As a special case, 8-bit types are always encoded as a
+// single byte.
+//
+// TODO(mdempsky): Is this level of complexity really worthwhile?
+func (w *exportWriter) mpint(x constant.Value, typ *types.Type) {
+ signed, maxBytes := intSize(typ)
+
+ negative := constant.Sign(x) < 0
+ if !signed && negative {
+ base.Fatalf("negative unsigned integer; type %v, value %v", typ, x)
+ }
+
+ // Reverse the little-endian bytes to big-endian.
+ b := constant.Bytes(x) // little endian
+ for i, j := 0, len(b)-1; i < j; i, j = i+1, j-1 {
+ b[i], b[j] = b[j], b[i]
+ }
+
+ if len(b) > 0 && b[0] == 0 {
+ base.Fatalf("leading zeros")
+ }
+ if uint(len(b)) > maxBytes {
+ base.Fatalf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x)
+ }
+
+ // maxSmall is the first byte value reserved for length prefixes;
+ // signed types reserve twice as many (one per sign).
+ maxSmall := 256 - maxBytes
+ if signed {
+ maxSmall = 256 - 2*maxBytes
+ }
+ if maxBytes == 1 {
+ maxSmall = 256
+ }
+
+ // Check if x can use small value encoding.
+ if len(b) <= 1 {
+ var ux uint
+ if len(b) == 1 {
+ ux = uint(b[0])
+ }
+ if signed {
+ // Zig-zag encode: 0, -1, 1, -2, ... -> 0, 1, 2, 3, ...
+ ux <<= 1
+ if negative {
+ ux--
+ }
+ }
+ if ux < maxSmall {
+ w.data.WriteByte(byte(ux))
+ return
+ }
+ }
+
+ // Large encoding: complement-coded length prefix (with sign in
+ // the bottom bit for signed types) followed by the bytes.
+ n := 256 - uint(len(b))
+ if signed {
+ n = 256 - 2*uint(len(b))
+ if negative {
+ n |= 1
+ }
+ }
+ if n < maxSmall || n >= 256 {
+ base.Fatalf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n)
+ }
+
+ w.data.WriteByte(byte(n))
+ w.data.Write(b)
+}
+
+// mpfloat exports a multi-precision floating point number.
+//
+// The number's value is decomposed into mantissa × 2**exponent, where
+// mantissa is an integer. The value is written out as mantissa (as a
+// multi-precision integer) and then the exponent, except exponent is
+// omitted if mantissa is zero.
+func (w *exportWriter) mpfloat(v constant.Value, typ *types.Type) {
+ f := ir.BigFloat(v)
+ if f.IsInf() {
+ base.Fatalf("infinite constant")
+ }
+
+ // Break into f = mant × 2**exp, with 0.5 <= mant < 1.
+ var mant big.Float
+ exp := int64(f.MantExp(&mant))
+
+ // Scale so that mant is an integer.
+ prec := mant.MinPrec()
+ mant.SetMantExp(&mant, int(prec))
+ exp -= int64(prec)
+
+ // The conversion must be exact by construction above.
+ manti, acc := mant.Int(nil)
+ if acc != big.Exact {
+ base.Fatalf("mantissa scaling failed for %f (%s)", f, acc)
+ }
+ w.mpint(makeInt(manti), typ)
+ if manti.Sign() != 0 {
+ w.int64(exp)
+ }
+}
+
+// bool writes b as a uint64 (0 or 1) and returns b, so it can be
+// used inline in conditions like `if w.bool(cond) { ... }`.
+func (w *exportWriter) bool(b bool) bool {
+ var x uint64
+ if b {
+ x = 1
+ }
+ w.uint64(x)
+ return b
+}
+
+// Low-level emitters: ints go into the current chunk; strings are
+// interned in the string section and written as offsets.
+func (w *exportWriter) int64(x int64) { w.data.int64(x) }
+func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) }
+func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) }
+
+// Compiler-specific extensions.
+
+// varExt writes the compiler-specific extension data for a variable:
+// its linkname and linker symbol index.
+func (w *exportWriter) varExt(n ir.Node) {
+ w.linkname(n.Sym())
+ w.symIdx(n.Sym())
+}
+
+// funcExt writes the compiler-specific extension data for a function:
+// linkname, symbol index, escape-analysis notes for receiver/params,
+// and (when available) the inline body reference.
+func (w *exportWriter) funcExt(n *ir.Name) {
+ w.linkname(n.Sym())
+ w.symIdx(n.Sym())
+
+ // Escape analysis.
+ for _, fs := range &types.RecvsParams {
+ for _, f := range fs(n.Type()).FieldSlice() {
+ w.string(f.Note)
+ }
+ }
+
+ // Inline body. Cost is written +1 so that 0 can mean "no inline
+ // body".
+ if n.Func.Inl != nil {
+ w.uint64(1 + uint64(n.Func.Inl.Cost))
+ if n.Func.ExportInline() {
+ w.p.doInline(n)
+ }
+
+ // Endlineno for inlined function.
+ w.pos(n.Func.Endlineno)
+ } else {
+ w.uint64(0)
+ }
+}
+
+// methExt writes the extension data for a method: the nointerface
+// flag followed by the usual function extension data.
+func (w *exportWriter) methExt(m *types.Field) {
+ w.bool(m.Nointerface())
+ w.funcExt(m.Nname.(*ir.Name))
+}
+
+// linkname writes s's linkname (empty if none).
+func (w *exportWriter) linkname(s *types.Sym) {
+ w.string(s.Linkname)
+}
+
+// symIdx writes the linker-object symbol index for s, or -1 when the
+// symbol can only be referenced by name.
+func (w *exportWriter) symIdx(s *types.Sym) {
+ lsym := s.Linksym()
+ if lsym.PkgIdx > goobj.PkgIdxSelf || (lsym.PkgIdx == goobj.PkgIdxInvalid && !lsym.Indexed()) || s.Linkname != "" {
+ // Don't export index for non-package symbols, linkname'd symbols,
+ // and symbols without an index. They can only be referenced by
+ // name.
+ w.int64(-1)
+ } else {
+ // For a defined symbol, export its index.
+ // For re-exporting an imported symbol, pass its index through.
+ w.int64(int64(lsym.SymIdx))
+ }
+}
+
+// typeExt writes the compiler-specific extension data for type t.
+func (w *exportWriter) typeExt(t *types.Type) {
+ // Export whether this type is marked notinheap.
+ w.bool(t.NotInHeap())
+ // For type T, export the index of type descriptor symbols of T and *T.
+ if i, ok := typeSymIdx[t]; ok {
+ w.int64(i[0])
+ w.int64(i[1])
+ return
+ }
+ w.symIdx(types.TypeSym(t))
+ w.symIdx(types.TypeSym(t.PtrTo()))
+}
+
+// Inline bodies.
+
+// stmtList writes each node in list, terminated by an OEND marker so
+// the importer knows where the list stops.
+func (w *exportWriter) stmtList(list ir.Nodes) {
+ for _, n := range list {
+ w.node(n)
+ }
+ w.op(ir.OEND)
+}
+
+// node dispatches n to the statement or expression writer based on
+// its operator precedence (negative precedence marks statements).
+func (w *exportWriter) node(n ir.Node) {
+ if ir.OpPrec[n.Op()] < 0 {
+ w.stmt(n)
+ } else {
+ w.expr(n)
+ }
+}
+
+// Caution: stmt will emit more than one node for statement nodes n that have a non-empty
+// n.Ninit and where n cannot have a natural init section (such as in "if", "for", etc.).
+func (w *exportWriter) stmt(n ir.Node) {
+ if len(n.Init()) > 0 && !ir.StmtWithInit(n.Op()) {
+ // can't use stmtList here since we don't want the final OEND
+ for _, n := range n.Init() {
+ w.stmt(n)
+ }
+ }
+
+ switch n.Op() {
+ case ir.OBLOCK:
+ // No OBLOCK in export data.
+ // Inline content into this statement list,
+ // like the init list above.
+ // (At the moment neither the parser nor the typechecker
+ // generate OBLOCK nodes except to denote an empty
+ // function body, although that may change.)
+ n := n.(*ir.BlockStmt)
+ for _, n := range n.List {
+ w.stmt(n)
+ }
+
+ case ir.ODCL:
+ n := n.(*ir.Decl)
+ w.op(ir.ODCL)
+ w.pos(n.X.Pos())
+ w.localName(n.X.(*ir.Name))
+ w.typ(n.X.Type())
+
+ case ir.OAS:
+ // Don't export "v = <N>" initializing statements, hope they're always
+ // preceded by the DCL which will be re-parsed and typecheck to reproduce
+ // the "v = <N>" again.
+ n := n.(*ir.AssignStmt)
+ if n.Y != nil {
+ w.op(ir.OAS)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.expr(n.Y)
+ }
+
+ case ir.OASOP:
+ n := n.(*ir.AssignOpStmt)
+ w.op(ir.OASOP)
+ w.pos(n.Pos())
+ w.op(n.AsOp)
+ w.expr(n.X)
+ if w.bool(!n.IncDec) {
+ w.expr(n.Y)
+ }
+
+ case ir.OAS2, ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
+ n := n.(*ir.AssignListStmt)
+ w.op(ir.OAS2)
+ w.pos(n.Pos())
+ w.exprList(n.Lhs)
+ w.exprList(n.Rhs)
+
+ case ir.ORETURN:
+ n := n.(*ir.ReturnStmt)
+ w.op(ir.ORETURN)
+ w.pos(n.Pos())
+ w.exprList(n.Results)
+
+ // case ORETJMP:
+ // unreachable - generated by compiler for trampolin routines
+
+ case ir.OGO, ir.ODEFER:
+ n := n.(*ir.GoDeferStmt)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.Call)
+
+ case ir.OIF:
+ n := n.(*ir.IfStmt)
+ w.op(ir.OIF)
+ w.pos(n.Pos())
+ w.stmtList(n.Init())
+ w.expr(n.Cond)
+ w.stmtList(n.Body)
+ w.stmtList(n.Else)
+
+ case ir.OFOR:
+ n := n.(*ir.ForStmt)
+ w.op(ir.OFOR)
+ w.pos(n.Pos())
+ w.stmtList(n.Init())
+ w.exprsOrNil(n.Cond, n.Post)
+ w.stmtList(n.Body)
+
+ case ir.ORANGE:
+ n := n.(*ir.RangeStmt)
+ w.op(ir.ORANGE)
+ w.pos(n.Pos())
+ w.stmtList(n.Vars)
+ w.expr(n.X)
+ w.stmtList(n.Body)
+
+ case ir.OSELECT:
+ n := n.(*ir.SelectStmt)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.stmtList(n.Init())
+ w.exprsOrNil(nil, nil) // TODO(rsc): Delete (and fix importer).
+ w.caseList(n)
+
+ case ir.OSWITCH:
+ n := n.(*ir.SwitchStmt)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.stmtList(n.Init())
+ w.exprsOrNil(n.Tag, nil)
+ w.caseList(n)
+
+ // case OCASE:
+ // handled by caseList
+
+ case ir.OFALL:
+ n := n.(*ir.BranchStmt)
+ w.op(ir.OFALL)
+ w.pos(n.Pos())
+
+ case ir.OBREAK, ir.OCONTINUE, ir.OGOTO, ir.OLABEL:
+ w.op(n.Op())
+ w.pos(n.Pos())
+ label := ""
+ if sym := n.Sym(); sym != nil {
+ label = sym.Name
+ }
+ w.string(label)
+
+ default:
+ base.Fatalf("exporter: CANNOT EXPORT: %v\nPlease notify gri@\n", n.Op())
+ }
+}
+
+func isNamedTypeSwitch(n ir.Node) bool {
+ if n.Op() != ir.OSWITCH {
+ return false
+ }
+ sw := n.(*ir.SwitchStmt)
+ if sw.Tag == nil || sw.Tag.Op() != ir.OTYPESW {
+ return false
+ }
+ guard := sw.Tag.(*ir.TypeSwitchGuard)
+ return guard.Tag != nil
+}
+
+func (w *exportWriter) caseList(sw ir.Node) {
+ namedTypeSwitch := isNamedTypeSwitch(sw)
+
+ var cases []ir.Node
+ if sw.Op() == ir.OSWITCH {
+ cases = sw.(*ir.SwitchStmt).Cases
+ } else {
+ cases = sw.(*ir.SelectStmt).Cases
+ }
+ w.uint64(uint64(len(cases)))
+ for _, cas := range cases {
+ cas := cas.(*ir.CaseStmt)
+ w.pos(cas.Pos())
+ w.stmtList(cas.List)
+ if namedTypeSwitch {
+ w.localName(cas.Vars[0].(*ir.Name))
+ }
+ w.stmtList(cas.Body)
+ }
+}
+
+func (w *exportWriter) exprList(list ir.Nodes) {
+ for _, n := range list {
+ w.expr(n)
+ }
+ w.op(ir.OEND)
+}
+
+func simplifyForExport(n ir.Node) ir.Node {
+ switch n.Op() {
+ case ir.OPAREN:
+ n := n.(*ir.ParenExpr)
+ return simplifyForExport(n.X)
+ case ir.ODEREF:
+ n := n.(*ir.StarExpr)
+ if n.Implicit() {
+ return simplifyForExport(n.X)
+ }
+ case ir.OADDR:
+ n := n.(*ir.AddrExpr)
+ if n.Implicit() {
+ return simplifyForExport(n.X)
+ }
+ case ir.ODOT, ir.ODOTPTR:
+ n := n.(*ir.SelectorExpr)
+ if n.Implicit() {
+ return simplifyForExport(n.X)
+ }
+ }
+ return n
+}
+
+func (w *exportWriter) expr(n ir.Node) {
+ n = simplifyForExport(n)
+ switch n.Op() {
+ // expressions
+ // (somewhat closely following the structure of exprfmt in fmt.go)
+ case ir.ONIL:
+ n := n.(*ir.NilExpr)
+ if !n.Type().HasNil() {
+ base.Fatalf("unexpected type for nil: %v", n.Type())
+ }
+ w.op(ir.ONIL)
+ w.pos(n.Pos())
+ w.typ(n.Type())
+
+ case ir.OLITERAL:
+ w.op(ir.OLITERAL)
+ w.pos(n.Pos())
+ w.value(n.Type(), n.Val())
+
+ case ir.OMETHEXPR:
+ // Special case: explicit name of func (*T) method(...) is turned into pkg.(*T).method,
+ // but for export, this should be rendered as (*pkg.T).meth.
+ // These nodes have the special property that they are names with a left OTYPE and a right ONAME.
+ n := n.(*ir.MethodExpr)
+ w.op(ir.OXDOT)
+ w.pos(n.Pos())
+ w.op(ir.OTYPE)
+ w.typ(n.T) // n.Left.Op == OTYPE
+ w.selector(n.Method.Sym)
+
+ case ir.ONAME:
+ // Package scope name.
+ n := n.(*ir.Name)
+ if (n.Class_ == ir.PEXTERN || n.Class_ == ir.PFUNC) && !ir.IsBlank(n) {
+ w.op(ir.ONONAME)
+ w.qualifiedIdent(n)
+ break
+ }
+
+ // Function scope name.
+ w.op(ir.ONAME)
+ w.localName(n)
+
+ // case OPACK, ONONAME:
+ // should have been resolved by typechecking - handled by default case
+
+ case ir.OTYPE:
+ w.op(ir.OTYPE)
+ w.typ(n.Type())
+
+ case ir.OTYPESW:
+ n := n.(*ir.TypeSwitchGuard)
+ w.op(ir.OTYPESW)
+ w.pos(n.Pos())
+ var s *types.Sym
+ if n.Tag != nil {
+ if n.Tag.Op() != ir.ONONAME {
+ base.Fatalf("expected ONONAME, got %v", n.Tag)
+ }
+ s = n.Tag.Sym()
+ }
+ w.localIdent(s, 0) // declared pseudo-variable, if any
+ w.exprsOrNil(n.X, nil)
+
+ // case OTARRAY, OTMAP, OTCHAN, OTSTRUCT, OTINTER, OTFUNC:
+ // should have been resolved by typechecking - handled by default case
+
+ // case OCLOSURE:
+ // unimplemented - handled by default case
+
+ // case OCOMPLIT:
+ // should have been resolved by typechecking - handled by default case
+
+ case ir.OPTRLIT:
+ n := n.(*ir.AddrExpr)
+ w.op(ir.OADDR)
+ w.pos(n.Pos())
+ w.expr(n.X)
+
+ case ir.OSTRUCTLIT:
+ n := n.(*ir.CompLitExpr)
+ w.op(ir.OSTRUCTLIT)
+ w.pos(n.Pos())
+ w.typ(n.Type())
+ w.fieldList(n.List) // special handling of field names
+
+ case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
+ n := n.(*ir.CompLitExpr)
+ w.op(ir.OCOMPLIT)
+ w.pos(n.Pos())
+ w.typ(n.Type())
+ w.exprList(n.List)
+
+ case ir.OKEY:
+ n := n.(*ir.KeyExpr)
+ w.op(ir.OKEY)
+ w.pos(n.Pos())
+ w.exprsOrNil(n.Key, n.Value)
+
+ // case OSTRUCTKEY:
+ // unreachable - handled in case OSTRUCTLIT by elemList
+
+ case ir.OCALLPART:
+ // An OCALLPART is an OXDOT before type checking.
+ n := n.(*ir.CallPartExpr)
+ w.op(ir.OXDOT)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.selector(n.Method.Sym)
+
+ case ir.OXDOT, ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH:
+ n := n.(*ir.SelectorExpr)
+ w.op(ir.OXDOT)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.selector(n.Sel)
+
+ case ir.ODOTTYPE, ir.ODOTTYPE2:
+ n := n.(*ir.TypeAssertExpr)
+ w.op(ir.ODOTTYPE)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.typ(n.Type())
+
+ case ir.OINDEX, ir.OINDEXMAP:
+ n := n.(*ir.IndexExpr)
+ w.op(ir.OINDEX)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.expr(n.Index)
+
+ case ir.OSLICE, ir.OSLICESTR, ir.OSLICEARR:
+ n := n.(*ir.SliceExpr)
+ w.op(ir.OSLICE)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ low, high, _ := n.SliceBounds()
+ w.exprsOrNil(low, high)
+
+ case ir.OSLICE3, ir.OSLICE3ARR:
+ n := n.(*ir.SliceExpr)
+ w.op(ir.OSLICE3)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ low, high, max := n.SliceBounds()
+ w.exprsOrNil(low, high)
+ w.expr(max)
+
+ case ir.OCOPY, ir.OCOMPLEX:
+ // treated like other builtin calls (see e.g., OREAL)
+ n := n.(*ir.BinaryExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.expr(n.Y)
+ w.op(ir.OEND)
+
+ case ir.OCONV, ir.OCONVIFACE, ir.OCONVNOP, ir.OBYTES2STR, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2RUNES, ir.ORUNESTR:
+ n := n.(*ir.ConvExpr)
+ w.op(ir.OCONV)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.typ(n.Type())
+
+ case ir.OREAL, ir.OIMAG, ir.OCAP, ir.OCLOSE, ir.OLEN, ir.ONEW, ir.OPANIC:
+ n := n.(*ir.UnaryExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.op(ir.OEND)
+
+ case ir.OAPPEND, ir.ODELETE, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
+ n := n.(*ir.CallExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.exprList(n.Args) // emits terminating OEND
+ // only append() calls may contain '...' arguments
+ if n.Op() == ir.OAPPEND {
+ w.bool(n.IsDDD)
+ } else if n.IsDDD {
+ base.Fatalf("exporter: unexpected '...' with %v call", n.Op())
+ }
+
+ case ir.OCALL, ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER, ir.OGETG:
+ n := n.(*ir.CallExpr)
+ w.op(ir.OCALL)
+ w.pos(n.Pos())
+ w.stmtList(n.Init())
+ w.expr(n.X)
+ w.exprList(n.Args)
+ w.bool(n.IsDDD)
+
+ case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
+ n := n.(*ir.MakeExpr)
+ w.op(n.Op()) // must keep separate from OMAKE for importer
+ w.pos(n.Pos())
+ w.typ(n.Type())
+ switch {
+ default:
+ // empty list
+ w.op(ir.OEND)
+ case n.Cap != nil:
+ w.expr(n.Len)
+ w.expr(n.Cap)
+ w.op(ir.OEND)
+ case n.Len != nil && (n.Op() == ir.OMAKESLICE || !n.Len.Type().IsUntyped()):
+ w.expr(n.Len)
+ w.op(ir.OEND)
+ }
+
+ // unary expressions
+ case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.ORECV:
+ n := n.(*ir.UnaryExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+
+ case ir.OADDR:
+ n := n.(*ir.AddrExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+
+ case ir.ODEREF:
+ n := n.(*ir.StarExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+
+ case ir.OSEND:
+ n := n.(*ir.SendStmt)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.Chan)
+ w.expr(n.Value)
+
+ // binary expressions
+ case ir.OADD, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT,
+ ir.OLSH, ir.OMOD, ir.OMUL, ir.ONE, ir.OOR, ir.ORSH, ir.OSUB, ir.OXOR:
+ n := n.(*ir.BinaryExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.expr(n.Y)
+
+ case ir.OANDAND, ir.OOROR:
+ n := n.(*ir.LogicalExpr)
+ w.op(n.Op())
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.expr(n.Y)
+
+ case ir.OADDSTR:
+ n := n.(*ir.AddStringExpr)
+ w.op(ir.OADDSTR)
+ w.pos(n.Pos())
+ w.exprList(n.List)
+
+ case ir.ODCLCONST:
+ // if exporting, DCLCONST should just be removed as its usage
+ // has already been replaced with literals
+
+ default:
+ base.Fatalf("cannot export %v (%d) node\n"+
+ "\t==> please file an issue and assign to gri@", n.Op(), int(n.Op()))
+ }
+}
+
+func (w *exportWriter) op(op ir.Op) {
+ w.uint64(uint64(op))
+}
+
+func (w *exportWriter) exprsOrNil(a, b ir.Node) {
+ ab := 0
+ if a != nil {
+ ab |= 1
+ }
+ if b != nil {
+ ab |= 2
+ }
+ w.uint64(uint64(ab))
+ if ab&1 != 0 {
+ w.expr(a)
+ }
+ if ab&2 != 0 {
+ w.node(b)
+ }
+}
+
+func (w *exportWriter) fieldList(list ir.Nodes) {
+ w.uint64(uint64(len(list)))
+ for _, n := range list {
+ n := n.(*ir.StructKeyExpr)
+ w.selector(n.Field)
+ w.expr(n.Value)
+ }
+}
+
+func (w *exportWriter) localName(n *ir.Name) {
+ // Escape analysis happens after inline bodies are saved, but
+ // we're using the same ONAME nodes, so we might still see
+ // PAUTOHEAP here.
+ //
+ // Check for Stackcopy to identify PAUTOHEAP that came from
+ // PPARAM/PPARAMOUT, because we only want to include vargen in
+ // non-param names.
+ var v int32
+ if n.Class_ == ir.PAUTO || (n.Class_ == ir.PAUTOHEAP && n.Name().Stackcopy == nil) {
+ v = n.Name().Vargen
+ }
+
+ w.localIdent(n.Sym(), v)
+}
+
+func (w *exportWriter) localIdent(s *types.Sym, v int32) {
+ // Anonymous parameters.
+ if s == nil {
+ w.string("")
+ return
+ }
+
+ name := s.Name
+ if name == "_" {
+ w.string("_")
+ return
+ }
+
+ // TODO(mdempsky): Fix autotmp hack.
+ if i := strings.LastIndex(name, "."); i >= 0 && !strings.HasPrefix(name, ".autotmp_") {
+ base.Fatalf("unexpected dot in identifier: %v", name)
+ }
+
+ if v > 0 {
+ if strings.Contains(name, "·") {
+ base.Fatalf("exporter: unexpected · in symbol name")
+ }
+ name = fmt.Sprintf("%s·%d", name, v)
+ }
+
+ if !types.IsExported(name) && s.Pkg != w.currPkg {
+ base.Fatalf("weird package in name: %v => %v, not %q", s, name, w.currPkg.Path)
+ }
+
+ w.string(name)
+}
+
+type intWriter struct {
+ bytes.Buffer
+}
+
+func (w *intWriter) int64(x int64) {
+ var buf [binary.MaxVarintLen64]byte
+ n := binary.PutVarint(buf[:], x)
+ w.Write(buf[:n])
+}
+
+func (w *intWriter) uint64(x uint64) {
+ var buf [binary.MaxVarintLen64]byte
+ n := binary.PutUvarint(buf[:], x)
+ w.Write(buf[:n])
+}