author    Michael Matloob <matloob@golang.org>  2021-08-24 17:24:19 -0400
committer Michael Matloob <matloob@golang.org>  2021-08-25 11:31:59 -0400
commit    109c13b64f815230af65af5c21e67cd7baa62a05 (patch)
tree      32bd06a5bd6c81b5e0e88f67e93f463a3d9d76ea /src/cmd
parent    e2e1987b31a587bdb67856954ae9279721b3bba7 (diff)
parent    c2f96e686fe9383711d23aea95a34a280fdd0e49 (diff)
[dev.cmdgo] all: merge master (c2f96e6) into dev.cmdgo
src/cmd/go/testdata/script/work.txt and src/cmd/go/testdata/script/work_edit.txt were updated so that the want files specify go1.18, since that is what go mod initwork will produce under Go 1.18 (an illustrative go.work sketch follows the merge list below).

Conflicts:

- src/cmd/go.mod
- src/cmd/go.sum
- src/cmd/go/internal/modload/init.go
- src/cmd/go/internal/work/init.go
- src/cmd/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
- src/cmd/vendor/golang.org/x/tools/internal/typeparams/notypeparams.go
- src/cmd/vendor/golang.org/x/tools/internal/typeparams/typeparams.go
- src/cmd/vendor/modules.txt
- src/cmd/vet/testdata/print/print.go

Merge List:

+ 2021-08-24 c2f96e686f cmd/compile: mark ODYNAMICDOTTYPE as an expression that can panic + 2021-08-24 5b64381155 cmd/compile: fix naming of types inside instantiations + 2021-08-24 4a9f0cec29 cmd/compile: change irgen to generate exprs/stmts after decls processed + 2021-08-24 daa55b21d1 cmd/link: guarantee "section .debug_gdb_scripts" is always "$GOROOT/src/runtime/runtime-gdb.py". + 2021-08-24 e6798795ff cmd/compile/internal/types2: use TypeList in the Inferred struct + 2021-08-24 b1cdf860dd cmd/compile/internal/types2: use a TypeList type to hold type arguments + 2021-08-24 1ff0554b53 cmd/compile/internal/types2: use []*TypeParam rather than []*TypeName for type param lists + 2021-08-24 bd97763577 cmd/compile/internal/types2: use an opaque environment for Instantiate + 2021-08-24 bba460499c cmd/compile/internal/types2: don't export TypeSet + 2021-08-24 d70c69d830 embed: document the maximum file size supported + 2021-08-24 f98b6111eb go/token: match the implementation of index selection with sort.Search + 2021-08-24 8eeb1bff1d cmd/compile: reuse same node for global dictionaries + 2021-08-23 be1a693477 cmd/compile: fixes for non-constant Sizeof/Alignof/Offsetof + 2021-08-23 8157960d7f all: replace runtime SSE2 detection with GO386 setting + 2021-08-23 22540abf76 runtime: use RDTSCP for instruction stream serialized read of TSC + 2021-08-23 fa34678c67 internal/buildcfg: change GOEXPERIMENT to always return non-empty string + 2021-08-23 0a7f00ae23 cmd/compile: do not mark arrays used for map initialization noalg + 2021-08-23 6b9e3f883e cmd/compile: don't emit write barriers for offsets of global addresses + 2021-08-23 3081f817da cmd/compile: always remove receiver type from instantiated method values + 2021-08-23 8486ced8b0 cmd/compile: copy captured dictionary var to local var + 2021-08-23 aeec6dbfe0 spec: add example for method value in case of embedded method + 2021-08-23 f457ecc7f0 cmd/compile: fixing 15.go for -G=3 + 2021-08-23 f1d8ea1da3 reflect: fix memmove for big endian cases with new ABI + 2021-08-23 4fbb5c8666 go/types: use TypeList in the Inferred struct + 2021-08-23 7a6d64fed6 go/types: use a TypeList type to hold type arguments + 2021-08-23 2438660602 go/types: use []*TypeParam rather than []*TypeName type param lists + 2021-08-23 9fe5c7f122 go/types: add the TypeParam.Obj method + 2021-08-23 baf2866956 go/types: move to an opaque environment for Instantiate + 2021-08-23 c7e354d9d1 go/types: return an error from Instantiate + 2021-08-23 c1a14781ec runtime: remove unused cpu architecture feature variables from binaries + 2021-08-23 457418b475 cmd/go: fix long test builders + 2021-08-22 86ee89225a strings: smarter growth of temporal buffer and avoid copying on return + 2021-08-22 29d7e5472b go/types: report argument type for unsafe.OffsetOf + 2021-08-22 8fcc614360 cmd/compile/internal/types2: enable TestSelection API test + 2021-08-22 5d5e50c3db os/user: simplify test skip for plan9 + 2021-08-22 
5d0c2840da cmd/compile/internal/types2: report argument type for unsafe.OffsetOf + 2021-08-22 19585826fa math/big: clarified doc string for SetMantExp + 2021-08-22 6416bde023 runtime: use asmcgocall_no_g when calling sigprocmask on openbsd + 2021-08-22 bd6845965c reflect: add example for FieldByIndex + 2021-08-22 96d816c574 runtime: fix buckHashSize duplication + 2021-08-21 6e50991d2a strconv: reject surrogate halves in Unquote + 2021-08-21 8fff20ffeb cmd/compile: absorb NEG into branch when possible on riscv64 + 2021-08-21 bcd146d398 cmd/compile: convert branch with zero to more optimal branch zero on riscv64 + 2021-08-21 dcee007aad cmd/compile: sort regalloc switch by architecture + 2021-08-21 e17439e087 go/types: don't override x.mode before using it + 2021-08-21 c9912780ab cmd/compile: enable -G=3 by default + 2021-08-20 97d17dc023 test/typeparam: add a test case for issue46591 + 2021-08-20 835ff47c16 cmd/internal/buildid: reject empty id + 2021-08-20 f67e31d643 test: enable regabi test on arm64 + 2021-08-20 ab9aaf46ee cmd/compile/internal/syntax: add PosBase.Trimmed + 2021-08-20 5045477be8 net/http: fix typo in header.go + 2021-08-20 0f25251127 go/types: change Checker.verify to return an error + 2021-08-20 30a423eb39 go/types: no need to validate substituted instances + 2021-08-20 e49775e057 go/types: consolidate verification logic + 2021-08-20 4d00fcbc43 go/types: clean up panics in instantiation + 2021-08-20 bacbc33439 archive/zip: prevent preallocation check from overflowing + 2021-08-20 7007431374 crypto/rand, internal/syscall/unix: don't use getentropy on iOS + 2021-08-20 303446395d cmd/compile: use typeAndStr directly in signatslice + 2021-08-20 e9e0d1ef70 cmd/asm/internal/arch: adds the missing type check for arm64 SXTB extension + 2021-08-20 c92c2c9d62 cmd/internal/obj/arm64: disable the pre and post index formats for pseudo registers + 2021-08-19 65074a4086 cmd/dist: remove unused variables + 2021-08-19 0e598e7da4 syscall: add SyscallN + 2021-08-19 91e2e3b903 cmd/compile: prevent duplicated works in WriteRuntimeTypes + 2021-08-19 9871726c72 reflect: add test for invalid conversion + 2021-08-19 69d8fbec7a cmd/compile/internal/types2: return an error from Instantiate + 2021-08-19 3bdc1799d6 io: unexport internal methods + 2021-08-19 740f7d7370 archive/tar: unexport internal methods + 2021-08-19 c85695a117 cmd/compile: add support for //go:nointerface for -G=3 + 2021-08-18 322879d5c9 cmd/compile/internal/dwarfgen: use src.Pos.Rel{Filename,Line,Col} consistently + 2021-08-18 687f2acf6a cmd/compile: only use dictionaries for conversions to type parameters + 2021-08-18 eda3de0f79 cmd/compile/internal/types2: change Checker.verify to return an error + 2021-08-18 805d38a352 cmd/compile/internal/types2: no need to validate substituted instances + 2021-08-18 c2bd9ee2db cmd/compile: only sort methods/interfaces during export for -d=unifiedquirks + 2021-08-18 8f0578ef39 cmd/compile/internal/types2: consolidate verification logic + 2021-08-18 165ebd85a7 cmd/compile/internal/types2: clean up panics in instantiation + 2021-08-18 4a0fd73ead cmd/go/internal/work/exec: throw an error when buildP is negative + 2021-08-18 0c83e01e0c cmd/go/testdata/script: fix test script added by CL 334873 + 2021-08-18 8b471db71b path/filepath: change IsAbs to treat \\host\share as an absolute path + 2021-08-18 946e2543f8 runtime: use RDCYCLE for cputicks on riscv64 + 2021-08-18 8e18428e38 cmd/internal/obj/arm64: don't use REGTMP when moving C_AACON2 to a register + 2021-08-18 aef24d8f7d 
cmd/internal/obj/arm64: fix the encoding error when operating with ZR + 2021-08-17 ddfcc02352 cmd/link: do not use GO_LDSO when cross compile + 2021-08-17 a2a9a7b513 cmd/go: make mod init disallow invalid major version suffixes + 2021-08-17 3848488f0f cmd/go/internal/test: add an all sentinel to -vet + 2021-08-17 ace1730a41 cmd/go: go test flag -failfast should be cacheable + 2021-08-17 0f85b0c0e1 go/types: fix method lookup for type-parameter based types + 2021-08-17 9d9e3291fa cmd/compile/internal/types2: fix method lookup for type-parameter based types + 2021-08-17 cf12b0d1f9 cmd/trace: use newTaskDesc to create taskDesc + 2021-08-17 3001b0abf0 cmd/link: remove elfwritedynentsym + 2021-08-17 4012fea822 all: fix typos + 2021-08-17 b7b790a71a cmd/compile: fix CONVIFACE case converting interface to empty interface + 2021-08-17 a304273d74 cmd/compile/internal/types2: allow composite literals of type parameter type + 2021-08-17 d3deb2c359 cmd/compile: fix typos + 2021-08-17 29ec74fb82 go/types: check if the interface is already complete in Complete + 2021-08-17 91a935ea0f Revert "go/types: make Interface.Complete a no-op" + 2021-08-17 a8d39f151d src: simplify race.bash checking condition + 2021-08-17 1951afc919 cmd/compile: lowered MulUintptr on riscv64 + 2021-08-16 2a19333716 net: reduce allocations for UDP send/recv on Windows + 2021-08-16 9c5eb16f6c net: reduce allocation size in ReadFromUDP + 2021-08-16 d9349175ad net: remove allocation from UDPConn.WriteTo + 2021-08-16 8ff16c1990 runtime: accept restartable sequence pcdata values in isAsyncSafePoint + 2021-08-16 df9c5d8f5d cmd/cgo: fix unused parameter warnings in generated _cgo_main.c + 2021-08-16 213e157d3a testing/fstest: allow specifying file for "." in MapFS + 2021-08-16 c04a32e59a net: avoid memory copy calling absDomainName + 2021-08-16 6406227d71 runtime: skip sysmon workaround on NetBSD >= 9.2 + 2021-08-16 a05a7d49a9 cmd/go: address code review comments in test cgo_path_space_quote + 2021-08-16 54ce8793a8 cmd: update x/tools and remove copy of txtar + 2021-08-16 742dcba7bb cmd: support space and quotes in CC and CXX + 2021-08-16 41d991e4e1 cmd/internal/str: add utilities for quoting and splitting args + 2021-08-16 4466141822 cmd/go: add document -json in testflag + 2021-08-16 8d2066177d cmd/go/internal/modfetch/codehost: refactor gitRepo.loadRefs to be harder to misuse + 2021-08-16 ec27168712 net/http: drop headers with invalid keys in Header.Write + 2021-08-16 d35035f84e go/types: use the orig object for Named.Obj + 2021-08-16 ddffe30a21 go/types: rename TypeParams to TParamList + 2021-08-16 631af58e20 go/types: remove targs from substMap + 2021-08-16 d1ba047edf go/types: simplify Named.under + 2021-08-16 56a919f17f go/types: define Identical for instances + 2021-08-16 ff36d11470 go/types: merge Instantiate and InstantiateLazy + 2021-08-16 2460cf8602 go/types: remove Named.SetTArgs + 2021-08-16 281ed619f8 go/types: parameterized functions must have a body + 2021-08-16 aab1d1fcb9 go/types: expand is only required for *Named types + 2021-08-16 9ff61acbd7 go/types,types2: superficial changes to align types and types2 + 2021-08-16 fda8ee8b07 go/types: make Interface.Complete a no-op + 2021-08-16 e61d1445ab cmd/compile: fix panic with dead hidden closures + 2021-08-16 5c7a460a1c syscall: hoist Getsockname out of NetlinkRIB loops + 2021-08-16 850768bbc9 time: update current time comment + 2021-08-16 a0adf91d85 internal/syscall/unix: change Ioctl arg type to unsafe.Pointer on AIX + 2021-08-16 5a40100141 cmd/compile: fix dictionaries 
for nested closures + 2021-08-16 c92f5ee170 cmd/link: start at address 0 when external linking + 2021-08-16 5da2010840 doc: start draft of go1.18 release notes, move go1.17 to x/website + 2021-08-16 ea8298e2f5 cmd/compile/internal/ssa: delete unused code + 2021-08-16 fe489c86a7 go/types: limit termlist lengths + 2021-08-16 b9f135d98f go/types: change types2.Union API to accept a list of Terms + 2021-08-16 c2b4ec8f49 go/types: add defined type to term/termlist tests + 2021-08-16 11a43df461 go/types: minor cleanup of writeTParamList + 2021-08-16 b0fba64ef4 go/types: fix make with type parameter argument + 2021-08-16 efd206eb40 cmd/compile: intrinsify Mul64 on riscv64 + 2021-08-16 7b7d7d7818 go/types: fix range over exprs of type parameter type + 2021-08-16 02f932e173 go/types: better names for things (cleanup) + 2021-08-16 a192ef8ac4 go/types: cleanup panic calls + 2021-08-16 11a1f37b07 go/types: remove TestIncompleteInterfaces (cleanup) + 2021-08-16 0b61dc4577 go/types: remove unused gcCompatibilityMode flag (cleanup) + 2021-08-16 c88e3ff648 cmd/compile/internal/types2: use the underlying TypeParam in assignableTo + 2021-08-16 d043c8ea89 go/types: implement type sets with term lists + 2021-08-16 94002f6fca go/types: implement term lists + 2021-08-16 3d679c6554 syscall: use correct type for TIOCSPGRP/TIOCGPGRP + 2021-08-16 fcdc3c098c runtime: make asmcgocall g0/gsignal checks consistent + 2021-08-16 160d797260 runtime: correct mips64 asmcgocall signal stack behaviour + 2021-08-16 6a760d6c36 runtime: include pthread.h in defs_openbsd.go + 2021-08-16 7aa57a9687 runtime: remove unused getrlimit on linux/riscv64 + 2021-08-16 57c115e1f6 crypto/sha{256,512}: unname result parameters for consistency + 2021-08-15 717894cf80 cmd/compile/internal/types2: better error message for index syntax error (follow-up) + 2021-08-15 6ed9463133 cmd/compile/internal/syntax: better error message for index syntax error + 2021-08-15 48dfddbab3 lib/time: fix RFC 6557 url + 2021-08-15 1162aae0ad time/tzdata: update links in comment + 2021-08-14 ff3469b1c2 cmd/dist: remove tests using the typeparams build tag + 2021-08-14 0a0a160d4d sync/atomic: fix documentation for CompareAndSwap + 2021-08-14 49c688e45c cmd/compile/internal/types2: rename TypeParams to TParamList + 2021-08-14 b2253c8041 cmd/compile/internal/types2: remove targs from substMap + 2021-08-14 456759b246 cmd/compile/internal/types2: use the orig object for Named.Obj + 2021-08-14 2d250043b4 cmd/compile/internal/types2: simplify Named.under + 2021-08-14 50f4ebbdd3 cmd/compile/internal/types2: define Identical for instances + 2021-08-14 fc27eb50ff cmd/compile/internal/types2: merge Instantiate and InstantiateLazy + 2021-08-13 7eaabae84d net: update IP.String doc to reflect RFC 5952 conformance + 2021-08-13 58490972c0 cmd/link: fix dead reference link + 2021-08-13 a95f1b51be test: change issue10441.go from "build" to "compile" + 2021-08-13 89a4f99640 lib/time: fix tz-link ftp url + 2021-08-13 2eb4d68833 runtime: don't use systemstack for BeforeFork/AfterFork + 2021-08-13 bad1fc1265 test: add test case for CL 340609 + 2021-08-13 641e8bc2c7 test: add test case that caused a gofrontend compiler crash + 2021-08-13 98f3d7fecb all: gofmt more (but vendor, testdata, and top-level test directories) + 2021-08-13 20a620fd9f runtime: drop SIGPROF while in ARM < 7 kernel helpers + 2021-08-13 4c8ffb3baa cmd/internal/str: move package from cmd/go/internal/str + 2021-08-13 4be75faa3e cmd/go: make fewer 'go mod' commands update go.mod + 2021-08-13 1fffeddfe9 cmd/go: add 
-testsum flag to update go.sum in script tests + 2021-08-12 0d01934094 Merge "all: REVERSE MERGE dev.typeparams (4d3cc84) into master" + 2021-08-12 044ec4fa98 time: fix docs for new comma layouts + 2021-08-12 3601aedff6 all: REVERSE MERGE dev.typeparams (4d3cc84) into master + 2021-08-12 4d3cc84774 Merge "[dev.typeparams] all: merge master (46fd547) into dev.typeparams" into dev.typeparams + 2021-08-12 a64ab8d3ec [dev.typeparams] all: merge master (46fd547) into dev.typeparams + 2021-08-12 7e9f911ec4 [dev.typeparams] cmd/compile: remove some shape checks in type substituter, other cleanups + 2021-08-12 46fd547d89 internal/goversion: update Version to 1.18 + 2021-08-12 5805efc78e doc/go1.17: remove draft notice + 2021-08-12 39634e7dae CONTRIBUTORS: update for the Go 1.17 release + 2021-08-12 095bb790e1 os/exec: re-enable LookPathTest/16 + 2021-08-12 677dfe5ad6 [dev.typeparams] cmd/compile: don't print out node pointer in ir.Dump + 2021-08-11 dea23e9ca8 src/make.*: make --no-clean flag a no-op that prints a warning + 2021-08-11 8ab59d812a [dev.typeparams] cmd/compile: change export version to 1.17 for testing + 2021-08-11 d7d4f28a06 [dev.typeparams] runtime, internal/bytealg: remove regabi fallback code on AMD64 + 2021-08-11 eeb7899137 [dev.typeparams] internal/buildcfg: always enable regabi on AMD64 + 2021-08-11 d4c0ed26ac doc/go1.17: linker passes -I to extld as -Wl,--dynamic-linker + 2021-08-11 0888a8cd2d [dev.typeparams] cmd/compile/internal/types2: remove unused TypeParam.Bound method + 2021-08-10 7308d747e7 [dev.typeparams] cmd/compile/internal/types2: remove Named.SetTArgs + 2021-08-10 0f34a92df7 [dev.typeparams] go/types: don't expose the TypeSet API for 1.18 + 2021-08-10 40ba119e3f [dev.typeparams] cmd/compile: keep export format unchanged if no type params are exported + 2021-08-10 fb8579746c [dev.typeparams] internal/goexperiment: update comment for RegabiArgs requirements + 2021-08-10 2e250cc957 [dev.typeparams] cmd: update vendored golang.org/x/tools to 337cebd2c151 + 2021-08-10 2fbf6aafe7 [dev.typeparams] cmd/compile: handle interface type parameters in type switches + 2021-08-10 e4cfa2f6da [dev.typeparams] cmd/compile/internal/types2: parameterized functions must have a body + 2021-08-10 508624f359 [dev.typeparams] cmd/compile/internal/types2: expand is only required for *Named types + 2021-08-10 1f9c9d8530 doc: use "high address/low address" instead of "top/bottom" + 2021-08-09 f5f79c47f9 [dev.typeparams] cmd/compile: use types2.Constraint() rather than types2.Bound() + 2021-08-09 f1dce319ff cmd/go: with -mod=vendor, don't panic if there are duplicate requirements + 2021-08-09 9f4d6a8359 [dev.typeparams] cmd/compile: call transformArgs before early typecheckaste in noder + 2021-08-09 ca3c6985cd [dev.typeparams] cmd/compile: implement generic type switches + 2021-08-09 57668b84ff [dev.typeparams] cmd/compile: simplify interface conversions + 2021-08-09 7aeaad5c86 runtime/cgo: when using msan explicitly unpoison cgoCallers + 2021-08-08 507cc341ec doc: add example for conversion from slice expressions to array ptr + 2021-08-07 d10a904712 [dev.typeparams] cmd/compile: don't export/import type parameter indices anymore + 2021-08-07 891547e2d4 doc/go1.17: fix a typo introduced in CL 335135 + 2021-08-06 8eaf4d16bc make.bash: do not overwrite GO_LDSO if already set + 2021-08-06 63b968f4f8 doc/go1.17: clarify Modules changes + 2021-08-06 9e0ac72d68 [dev.typeparams] cmd/compile/internal/types2: remove Interface.Complete (cleanup) + 2021-08-06 9bd1817e41 [dev.typeparams] 
cmd/compile/internal/types2: limit termlist lengths + 2021-08-06 313924f272 [dev.typeparams] cmd/compile: swap export order of union term components (cleanup) + 2021-08-06 0d7dc417ea [dev.typeparams] cmd/compile: change types2.Union API to accept a list of Terms + 2021-08-06 09d82689ed [dev.typeparams] cmd/compile/internal/types2: add defined type to term/termlist tests + 2021-08-06 3a9fd99849 [dev.typeparams] cmd/compile/internal/syntax: cleanup panic calls + 2021-08-06 c3b57af8bc [dev.typeparams] cmd/compile/internal/types2: minor cleanup of writeTParamList + 2021-08-06 0811108670 [dev.typeparams] cmd/compile/internal/types2: fix make with type parameter argument + 2021-08-06 93285c89d1 [dev.typeparams] cmd/compile/internal/types2: fix range over exprs of type parameter type + 2021-08-06 5aac85ad5e [dev.typeparams] cmd/compile/internal/types2: better names for things (cleanup) + 2021-08-06 110343e4a2 [dev.typeparams] cmd/compile: cleanup wrapper code for generics + 2021-08-06 5e33d11e10 [dev.typeparams] cmd/compile: do transformCall with non-shape type of call + 2021-08-06 ac78501b9c [dev.typeparams] cmd/compile: make sure closures inside generic funcs are not compiled + 2021-08-06 70546f6404 runtime: allow arm64 SEH to be called if illegal instruction + 2021-08-05 fd45e267c2 runtime: warn that KeepAlive is not an unsafe.Pointer workaround + 2021-08-05 f78d538858 [dev.typeparams] cmd/compile/internal/types2: cleanup panic calls + 2021-08-05 c5b6c36ddd [dev.typeparams] cmd/compile/internal/types2: remove TestIncompleteInterfaces (cleanup) + 2021-08-05 f14908d01b [dev.typeparams] cmd/compile/internal/types2: remove unused gcCompatibilityMode flag (cleanup) + 2021-08-05 bb5608dd5d [dev.typeparams] cmd/compile/internal/types2: implement type sets with term lists + 2021-08-05 6dadee759c [dev.typeparams] cmd/compile: unified importReader receiver name to r + 2021-08-05 5dcb5e2cea [dev.typeparams] cmd/compile: dictionary/shape cleanup + 2021-08-05 3cdf8b429e [dev.typeparams] cmd/compile: fixing case where type arg is an interface + 2021-08-04 1b708c0260 [dev.typeparams] go/types: remove a stale comment (cleanup) + 2021-08-04 0ec2a8b42d [dev.typeparams] go/types: switch the TArgs API to NumTArgs/TArg + 2021-08-04 e5fe769be1 [dev.typeparams] cmd/compile/internal/types2: implement term lists + 2021-08-04 b730a26729 [dev.typeparams] cmd/compile: put shape types in their own package + 2021-08-04 e590cb64f9 [dev.typeparams] runtime: handle d.link carefully when freeing a defer + 2021-08-04 6e738868a7 net/http: speed up and deflake TestCancelRequestWhenSharingConnection + 2021-08-04 d27a889119 [dev.typeparams] go/types: move instance.go contents into named.go (cleanup) + 2021-08-04 b01e775e9c [dev.typeparams] go/types: print constraint info for type param operands + 2021-08-04 3efc8f9a8d [dev.typeparams] go/types: (TypeParam) SetBound -> SetConstraint + 2021-08-04 ed3667d079 [dev.typeparams] go/types: use type terms to represent unions + 2021-08-04 880ab6209e [dev.typeparams] cmd/compile/internal/types2: fix a panic in missingMethod + 2021-08-04 5b51cf47dc [dev.typeparams] go/types: implement type terms + 2021-08-04 e0d0907212 [dev.typeparams] go/types: use comparable bit rather than ==() method + 2021-08-04 18e0503724 [dev.typeparams] go/types: embedded type cannot be a (pointer to) a type parameter + 2021-08-04 89897473e2 [dev.typeparams] go/types: implement TypeParam.Constraint + 2021-08-04 1ea3596b41 [dev.typeparams] go/types: adjust unsafe.Alignof/Offsetof/Sizeof + 2021-08-03 88bd92bb6d 
[dev.typeparams] runtime: simplify freedefer + 2021-08-03 1a0630aef4 [dev.typeparams] runtime,cmd/compile,cmd/link: replace jmpdefer with a loop + 2021-08-03 077925e2b0 [dev.typeparams] runtime: remove unnecessary split-prevention from defer code + 2021-08-03 7ab8754029 [dev.typeparams] cmd/compile: avoid redundant method wrappers in unified IR + 2021-08-03 fe73f28dc5 [dev.typeparams] cmd/compile: set sym.Def to ir.Name for method value wrappers + 2021-08-03 656f0888b7 [dev.typeparams] cmd/compile: make softfloat mode work with register ABI + 2021-08-03 1b193598b3 [dev.typeparams] cmd/compile: fail early on unexpected types2.Invalid + 2021-08-02 e56234a305 [dev.typeparams] cmd/compile: simple shape cleanups + 2021-08-02 c3c19731a9 [dev.typeparams] cmd/compile/internal/types2: move instance.go contents into named.go (cleanup) + 2021-08-02 156eeb40a6 [dev.typeparams] cmd/compile: make HasShape() more efficient by implementing with a type flag + 2021-08-02 8a7ee4c51e io/fs: don't use absolute path in DirEntry.Name doc + 2021-08-02 283991bd7f [dev.typeparams] cmd/compile/internal/types2: print constraint info for type param operands + 2021-08-02 aa3d54da07 [dev.typeparams] runtime: rewrite softfloat functions to avoid using floats + 2021-07-31 b8ca6e59ed all: gofmt + 2021-07-31 0b8a9ccb25 [dev.typeparams] cmd/compile: make all pointer types have the same shape + 2021-07-30 7bed50e667 [dev.typeparams] Revert "[dev.typeparams] runtime: remove unnecessary split-prevention from defer code" + 2021-07-30 e3e9f0bb2d [dev.typeparams] Revert "[dev.typeparams] runtime,cmd/compile,cmd/link: replace jmpdefer with a loop" + 2021-07-30 40e561d933 [dev.typeparams] cmd/compile: allow types with the same underlying type to have the same shape + 2021-07-30 fd0011dca5 [dev.typeparams] runtime,cmd/compile,cmd/link: replace jmpdefer with a loop + 2021-07-30 53fd5b1b77 [dev.typeparams] runtime: remove unnecessary split-prevention from defer code + 2021-07-30 ea94e5d3c5 [dev.typeparams] runtime: use func() for deferred functions + 2021-07-30 b7a85e0003 net/http/httputil: close incoming ReverseProxy request body + 2021-07-30 4480e3b11a [dev.typeparams] go/types: backport lazy loading changes from CL 336252 + 2021-07-30 27283d208f [dev.typeparams] cmd/compile: remove now-unneeded check for '==' method for comparable type + 2021-07-29 3e7571f6ff [dev.typeparams] go/types,cmd/compile/internal/types2: fix TypeParams.At docs + 2021-07-29 1d35d8ffa5 [dev.typeparams] cmd/compile: switch unified IR from TypeParam.Bound to TypeParam.Constraint + 2021-07-29 35dbdda2fe [dev.typeparams] cmd/compile: remove remaining uses of Unshapify + 2021-07-29 600b7b431b [dev.typeparams] cmd/compile: handle meth expressions on typeparams + 2021-07-29 5ecbd811b5 [dev.typeparams] cmd/compile/internal/types2: (TypeParam) SetBound -> SetConstraint + 2021-07-29 46cc686381 [dev.typeparams] cmd/compile/internal/types2: use the TParams API consistently + 2021-07-29 27552e9172 [dev.typeparams] cmd/compile: set type parameter indices when they are bound + 2021-07-29 af903261e7 [dev.typeparams] go/types, types2: remove instance.verify field (cleanup) + 2021-07-29 c079b6baaa [dev.typeparams] cmd/compile/internal/types2: trigger verification while resolving instance + 2021-07-29 ff0c0dbca6 [dev.typeparams] cmd/compile/internal/types2: use type terms to represent unions + 2021-07-29 2fa8f00915 [dev.typeparams] cmd/compile/internal/types2: implement type terms + 2021-07-29 f4f503e0a3 [dev.typeparams] cmd/compile: implement generic .(T) operations + 
2021-07-29 70fd4e47d7 runtime: avoid possible preemption when returning from Go to C + 2021-07-28 4a47e40a14 [dev.typeparams] cmd/compile: don't export blank functions in unified IR + 2021-07-28 506fd520d5 [dev.typeparams] cmd/compile: don't compile blank functions + 2021-07-28 adedf54288 [dev.typeparams] test: rename blank functions + 2021-07-28 5355753009 [dev.typeparams] test/typeparam: gofmt -w + 2021-07-28 473e493d18 [dev.typeparams] cmd/compile/internal/types2: merge instance and Named to eliminate sanitization + 2021-07-28 e00a6ec084 [dev.typeparams] cmd/compile: mark methods of instantiated interface types as used + 2021-07-27 c751e2e6ba [dev.typeparams] cmd/compile/internal/types2: use comparable bit rather than ==() method + 2021-07-27 5d8f90f904 [dev.typeparams] cmd/compile: don't need to unshapify append calls + 2021-07-27 cb14e673ec [dev.typeparams] runtime: don't keep stack uintptr across potential stack move + 2021-07-26 37d2219960 [dev.typeparams] cmd/compile/internal/types2: embedded type cannot be a (pointer to) a type parameter + 2021-07-26 d6753fd491 [dev.typeparams] cmd/compile/internal/types2: implement TypeParam.Constraint + 2021-07-26 9e3274bb3d [dev.typeparams] cmd/compile/internal/types2: import regexp/syntax instead of cmd/compile/internal/syntax + 2021-07-26 b93f646125 [dev.typeparams] cmd/compile/internal/types2: fix a bug in package qualification logic + 2021-07-26 996b0dbc65 [dev.typeparams] all: merge master (ecaa681) into dev.typeparams + 2021-07-26 bfcb7c4c8a [dev.typeparams] cmd/compile: fix unified IR support for //go:nointerface + 2021-07-24 b27c7e30dc [dev.typeparams] cmd/compile: fix HasShape, add dottype test + 2021-07-24 a2e2b0362b [dev.typeparams] transformDot() should set Selection and tc flag for added ODOTs + 2021-07-24 3dc0a0a2c5 [dev.typeparams] cmd/compile: get rid of concretify use for bounds. 
+ 2021-07-24 77e0bf294c [dev.typeparams] cmd/compile: introduce OCONVIDATA op + 2021-07-24 9f928f9318 [dev.typeparams] go/types, types2: set tset when constructing interfaces in the universe + 2021-07-23 6992dcdad9 [dev.typeparams] cmd/compile: fix some issues with cons.go + 2021-07-23 e6d956e1c5 [dev.typeparams] cmd/compile: add CONVIFACE nodes for return values during noder2 + 2021-07-23 02c0172500 [dev.typeparams] cmd/compile: add dictionary entries for itab conversion + 2021-07-23 12866bd8ea [dev.typeparams] Add CONVIFACE nodes in noder2, where possible + 2021-07-23 4cdc65d32a [dev.typeparams] cmd/compile/internal/types: format union types + 2021-07-22 244267e8c4 Merge "[dev.typeparams] all: merge master (798ec73) into dev.typeparams" into dev.typeparams + 2021-07-22 d8ceb133ca [dev.typeparams] runtime: mark TestGcSys as flaky + 2021-07-22 a27e325c59 [dev.typeparams] all: merge master (798ec73) into dev.typeparams + 2021-07-22 5cb84f0604 [dev.typeparams] cmd/compile: make sure types added to the dictionary are instantiated correctly + 2021-07-22 73162a54c2 [dev.typeparams] cmd/compile: remove outdate TODO in escape analysis + 2021-07-22 80127a7dfe [dev.typeparams] cmd/compile/internal/types2: adjust unsafe.Alignof/Offsetof/Sizeof + 2021-07-22 fca3e5c445 [dev.typeparams] cmd/compile: fix missing condition in usemethod + 2021-07-22 5ba06495c1 [dev.typeparams] go/types: use the TParams API consistently + 2021-07-22 6f57139c7a [dev.typeparams] go/types: set type parameter indices when they are bound + 2021-07-22 311baf65f4 [dev.typeparams] test: cleanup 'go env' and -goexperiment + 2021-07-22 b7149b781f [dev.typeparams] go/types: trigger verification while resolving instance + 2021-07-22 61f69d2559 [dev.typeparams] go/types: merge instance and Named to eliminate sanitization + 2021-07-22 8e9109e95a [dev.typeparams] Fix problem with 14.go + 2021-07-22 ee20dff27d [dev.typeparams] Get dictionaryCapture.go working. 
+ 2021-07-21 4e6836e82c [dev.typeparams] Fix the types of the OFUNCINST nodes in noder2 + 2021-07-21 dcc8350ad3 [dev.typeparams] cmd/compile: handle ++/-- in noder2 for operands with generic type + 2021-07-21 f19e49e7b1 [dev.typeparams] cmd/compile: added a builtins.go test, fixed one bug + 2021-07-21 e6a2cf233f [dev.typeparams] cmd/compile: get runtime stuff working + 2021-07-21 4a97fe8c22 [dev.typeparams] cmd/compile: avoid adding incorrectly instantiated types to the dictionary + 2021-07-21 2fe4b14795 [dev.typeparams] cmd/compile: ensure methods of generic types survive linker pruning + 2021-07-21 73af5f718f [dev.typeparams] cmd/compile: disable failing generic tests + 2021-07-21 a7a17f0ca8 [dev.typeparams] cmd/compile: introduce named gcshape types + 2021-07-21 897970688b [dev.typeparams] cmd/compile: cleanup unified IR file format a little + 2021-07-21 d5f6ba943c [dev.typeparams] test: add regression test for go/defer wrapper + 2021-07-20 6a931673f0 [dev.typeparams] cmd/compile: add base.Assertf{,At} functions + 2021-07-20 e4994e71fb [dev.typeparams] all: merge master (c8f4e61) into dev.typeparams + 2021-07-19 c6d3d0b0ad [dev.typeparams] go/types: fix the type parameter index in applyTypeFunc + 2021-07-19 6bf2667d4e [dev.typeparams] go/types: more consistent handling of predeclared "any" + 2021-07-19 7e714f448e [dev.typeparams] go/types: embedding stand-alone type parameters is not permitted + 2021-07-19 82f875d735 [dev.typeparams] go/types: fix generic type indirection + 2021-07-19 62f6f130fe [dev.typeparams] go/types: interface identity must consider full type set + 2021-07-19 baeabf3b36 [dev.typeparams] go/types: cleanups around receiver type checks + 2021-07-19 b3d91e3a24 [dev.typeparams] go/types: implement delete(m, k) where m is of type parameter type + 2021-07-19 cf7e66b7d4 [dev.typeparams] go/types: implement close(ch) where ch is of type parameter type + 2021-07-19 19b4142f24 [dev.typeparams] go/types: implement ch <- x where ch is of type parameter type + 2021-07-19 d6d7f8458e [dev.typeparams] go/types: implement <-ch where ch is of type parameter type + 2021-07-19 796ac6d5f2 [dev.typeparams] go/types: move methods on *Named into named.go + 2021-07-19 22f39ba208 [dev.typeparams] go/types: use InstantiateLazy to create instance types (cleanup) + 2021-07-19 4a72be87b3 [dev.typeparams] go/types: move instantiation code to instantiate.go (cleanup) + 2021-07-19 41ff0aac13 [dev.typeparams] go/types: replace types2.Instantiate with Checker.Instantiate + 2021-07-19 9e147c55b7 [dev.typeparams] go/types: update TypeParam APIs to match types2 + 2021-07-19 22a38ba5ca [dev.typeparams] go/types: remove unnecessary guard from NewInterfaceType + 2021-07-19 b96f1b9419 [dev.typeparams] go/types: add some missing APIs for the importer + 2021-07-19 43ad1ffa99 [dev.typeparams] go/types: recursive substitution must terminate (bug fix) + 2021-07-19 c7c13ae432 [dev.typeparams] go/types: use scope numbers to identify local types + 2021-07-19 ccf95f17dd [dev.typeparams] go/types: support local defined types + 2021-07-19 76b39959f4 [dev.typeparams] go/types: don't permit method calls on ptr to type parameter receivers + 2021-07-19 9b85985d36 [dev.typeparams] Separate out gcshape types that are instantiated types + 2021-07-17 df778e6fd9 [dev.typeparams] go/types: replace optype() with under() in various cases (cleanup) + 2021-07-17 e9836fe318 [dev.typeparams] go/types: clean up index expr implementation for type parameters + 2021-07-17 c4cd76fbbb [dev.typeparams] go/types: disallow "free" type 
parameter as RHS of a type declaration + 2021-07-16 521828091c [dev.typeparams] go/types: move (remaining) type decls into their own files (cleanup) + 2021-07-16 624d152db7 [dev.typeparams] go/types: move Interface type decl into interface.go (cleanup) + 2021-07-16 e12d43866d [dev.typeparams] go/types: move Signature type decl into signature.go (cleanup) + 2021-07-16 b3e7f23a48 [dev.typeparams] go/types: move Struct type decl into struct.go (cleanup) + 2021-07-16 7c35f5c2fc [dev.typeparams] go/types: rename newTypeSet -> computeTypeSet + 2021-07-16 de209e693a [dev.typeparams] go/types: make Interface.obj a *TypeName + 2021-07-16 0f4198b5e2 [dev.typeparams] go/types: delay interface check for type bounds + 2021-07-16 5f50a6442e [dev.typeparams] go/internal/typeparams: remove the Enabled guard + 2021-07-16 726ffce659 [dev.typeparams] go/types: "comparable" must not be visible before Go 1.18 + 2021-07-16 79955155e9 [dev.typeparams] go/types: move newTypeSet function into typeset.go + 2021-07-16 fe4f13404d [dev.typeparams] go/types: move embedding positions from Checker to Interface + 2021-07-16 b98b8b9b5b [dev.typeparams] go/types: remove unused *Checker arguments (cleanup) + 2021-07-16 fce6290e0a [dev.typeparams] go/types: remove typeparams wrappers and aliases + 2021-07-16 24f9eb2de3 [dev.typeparams] go/types: introduce type set abstraction for interfaces + 2021-07-16 b296e54618 [dev.typeparams] go/types: port lazy import resolution from types2 + 2021-07-16 10c8b7c1d7 [dev.typeparams] cmd/compile: use dictionary to convert arguments of ==, != to interfaces + 2021-07-16 ed9e109dc9 [dev.typeparams] cmd/compile: fix small -G=3 issues for tests disabled in run.go + 2021-07-16 3d8453e00e [dev.typeparams] cmd/compile/internal/types2: more consistent handling of predeclared "any" + 2021-07-16 334f2fc045 [dev.typeparams] go/*: switch from ListExpr to MultiIndexExpr + 2021-07-15 6b85a218b8 [dev.typeparams] cmd/compile: make TestUnifiedCompare insensitive to default -G level + 2021-07-14 4ff0e04c2e [dev.typeparams] cmd/compile/internal/types2: embedding stand-alone type parameters is not permitted + 2021-07-14 3a047326e8 [dev.typeparams] cmd/compile/internal/types2: fix generic type indirection + 2021-07-14 dd8bdf4a1f [dev.typeparams] cmd/compile/internal/types2: interface identity must consider full type set + 2021-07-14 2a8087817c [dev.typeparams] cmd/compile/internal/types2: cleanups around receiver type checks + 2021-07-14 95f8e64fc0 [dev.typeparams] cmd/compile/internal/types2: implement delete(m, k) where m is of type parameter type + 2021-07-14 5f0ea40c67 [dev.typeparams] cmd/compile/internal/types2: implement close(ch) where ch is of type parameter type + 2021-07-14 6511922a14 [dev.typeparams] cmd/compile/internal/types2: implement ch <- x where ch is of type parameter type + 2021-07-14 ff33d3dc3a [dev.typeparams] cmd/compile/internal/types2: implement <-ch where ch is of type parameter type + 2021-07-14 e3e6cd3022 [dev.typeparams] cmd/compile: fix escape printout bugs for -G=3 + 2021-07-14 2b10d7ff0b [dev.typeparams] go/types: export the Config.GoVersion field + 2021-07-14 5517053d17 [dev.typeparams] cmd/compile: record more typ/fun info for dictionaries in unified IR + 2021-07-14 82744bfbfc [dev.typeparams] cmd/compile: handle objStub earlier in reader + 2021-07-13 e5faa8d84b [dev.typeparams] cmd/compile/internal/types2: move methods on *Named into named.go (cleanup) + 2021-07-13 d0324eb8fb [dev.typeparams] cmd/compile/internal/types2: use InstantiateLazy to create instance types (cleanup) 
+ 2021-07-13 70f1246a9f [dev.typeparams] cmd/compile/internal/types2: move instantiation code to instantiate.go (cleanup) + 2021-07-13 22e9265467 [dev.typeparams] cmd/compile/internal/types2: replace types2.Instantiate with Checker.Instantiate + 2021-07-12 1c783dc148 [dev.typeparams] Add optional sub-dict entry for typeparam bound calls + 2021-07-11 0dcab98fd8 [dev.typeparams] cmd/compile: slightly more incremental unified typecheck + 2021-07-10 3c3c1d8d28 [dev.typeparams] cmd/compile: more incremental typecheck for unified IR + 2021-07-10 a12ad27119 [dev.typeparams] cmd/compile: report functions declared in Go and assembly + 2021-07-10 5059aed9dd [dev.typeparams] internal/buildcfg: allow regabiwrappers on all GOARCH + 2021-07-09 f2ed30c31e [dev.typeparams] cmd/compile/internal/types2: recursive substitution must terminate (bug fix) + 2021-07-09 69d945fc6e [dev.typeparams] cmd/compile/internal/types2: use scope numbers to identify local types + 2021-07-09 04acb8a7b9 [dev.typeparams] cmd/compile: report mismatch between types because of //go:notinheap + 2021-07-08 2b1d70a137 [dev.typeparams] all: merge master (296ddf2) into dev.typeparams + 2021-07-08 42fe132787 [dev.typeparams] cmd/compile: cleanup ABI utils tests + 2021-07-08 d4f6d161e4 [dev.typeparams] cmd/compile: fix bunch of -G=3 bugs for test cases in test/typeparams/mdempsky + 2021-07-07 18135150b0 [dev.typeparams] cmd/compile/internal/types2: don't permit method calls on ptr to type parameter receivers + 2021-07-07 d2bf94fb86 [dev.typeparams] cmd/compile/internal/types2: replace optype() with under() in various cases (cleanup) + 2021-07-07 03ec8de24b [dev.typeparams] cmd/compile/internal/types2: clean up index expr implementation for type parameters + 2021-07-07 47547d8508 [dev.typeparams] cmd/compile/internal/types2: disallow "free" type parameter as RHS of a type declaration + 2021-07-07 60cb2cab97 [dev.typeparams] cmd/compile: fix bug with types2.Instantiate with interface type param + 2021-07-07 85267f402c [dev.typeparams] cmd/compile: move def of comparable to end of predeclared slices + 2021-07-07 c65ca97a45 [dev.typeparams] cmd/compile: fix windows longtest builder + 2021-07-07 501725032c [dev.typeparams] cmd/compile: handle derived types that are converted to interfaces + 2021-07-07 b614c05a15 [dev.typeparams] cmd/compile: add built-in name/type "comparable". 
+ 2021-07-07 b4844c9f54 [dev.typeparams] cmd/compile: handle the (*T).M method expression with dictionaries + 2021-07-07 4676c3675e [dev.typeparams] cmd/compile: rename PartialCallType -> MethodValueType + 2021-07-07 5c42b6a953 [dev.typeparams] test: add regress tests that fail(ed) with -G=3 + 2021-07-07 49ade6b298 [dev.typeparams] test: add expected failure mechanism + 2021-07-04 cd00499c61 [dev.typeparams] cmd/compile: better Call constructor + 2021-07-04 899b158ee9 [dev.typeparams] cmd/compile: set Func.ClosureCalled in escape analysis + 2021-07-03 ea5369bac0 [dev.typeparams] cmd/compile: remove ir.CallUse + 2021-07-03 c45d0eaadb [dev.typeparams] cmd/compile: flatten OINLCALL in walk + 2021-07-03 ad2ba3ff51 [dev.typeparams] src,cmd: run 'go mod tidy' + 2021-07-03 5dac279fbd [dev.typeparams] cmd/compile: formalize "hidden parameters" idea + 2021-07-03 611056ec34 Merge "[dev.typeparams] all: merge master (912f075) into dev.typeparams" into dev.typeparams + 2021-07-02 ef39edefe1 [dev.typeparams] src,cmd: bump go.mod to 'go 1.18' + 2021-07-02 f35d86fd5f [dev.typeparams] all: merge master (912f075) into dev.typeparams + 2021-07-02 b994cc69e0 [dev.typeparams] cmd/compile: separate out creating instantiations from creating dictionaries + 2021-07-02 6dec18cc75 [dev.typeparams] cmd/compile: start using sub-dictionary entries where needed + 2021-07-02 a18726a648 [dev.typeparams] cmd/compile: incremental typecheck during unified IR + 2021-07-02 2aea44204e [dev.typeparams] cmd/compile: enable generics syntax with -lang=go1.18 + 2021-07-01 30e5f266ed [dev.typeparams] cmd/compile/internal/types2: move (remaining) type decls into their own files (cleanup) + 2021-07-01 9c1e7d9eff [dev.typeparams] cmd/compile/internal/types2: move Interface type decl into interface.go (cleanup) + 2021-07-01 838079beef [dev.typeparams] cmd/internal/dwarf: remove putInlinedFunc's callersym param + 2021-07-01 9ba294e15b [dev.typeparams] cmd/compile: fix getDictionarySym for methods references, write out sub-dictionaries + 2021-07-01 0e0b80cb56 [dev.typeparams] cmd/compile/internal/types2: move Signature type decl into signature.go (cleanup) + 2021-07-01 1aadb18f83 [dev.typeparams] cmd/compile/internal/types2: move Struct type decl into struct.go (cleanup) + 2021-07-01 fac21803ce [dev.typeparams] cmd/compile/internal/types2: rename newTypeSet -> computeTypeSet + 2021-07-01 1eb756689c [dev.typeparams] cmd/compile/internal/types2: make Interface.obj a *TypeName + 2021-07-01 9cb1b0f50b [dev.typeparams] cmd/compile/internal/types2: delay interface check for type bounds + 2021-07-01 1cd505c353 [dev.typeparams] cmd/compile/internal/types2: "comparable" must not be visible before Go 1.18 + 2021-07-01 706c580ee1 [dev.typeparams] cmd/compile: simplify autotmpname + 2021-07-01 372b312735 [dev.typeparams] cmd/compile: refactor top-level typechecking in unified IR + 2021-06-30 ad7e5b219e [dev.typeparams] all: merge master (4711bf3) into dev.typeparams + 2021-06-30 8767b87ab5 [dev.typeparams] cmd/compile: functions to create GC shape types/names for a concrete type + 2021-06-30 b47cbc2ffe [dev.typeparams] cmd/compile/internal/types2: move newTypeSet function into typeset.go + 2021-06-30 f0206e3df2 [dev.typeparams] cmd/compile/internal/types2: move embedding positions from Checker to Interface + 2021-06-30 1ff43d1b17 [dev.typeparams] cmd/compile/internal/types2: remove unused *Checker arguments (cleanup) + 2021-06-30 4b5fdb0b7a [dev.typeparams] cmd/compile/internal/types2: introduce type set abstraction for interfaces + 2021-06-30 
f503740ccf [dev.typeparams] cmd/compile: add derived-type dictionaries to unified IR + 2021-06-29 6a5f7e8498 [dev.typeparams] cmd/compile: use dictionary entries for more conversion cases + 2021-06-29 5fa6bbc669 [dev.typeparams] cmd/compile: clean up instantiation and dictionary naming + 2021-06-29 dfa8fd861c [dev.typeparams] cmd/compile: add a field (method) name for function in TestABIUtilsInterfaces + 2021-06-28 64e6c75924 [dev.typeparams] cmd/compile: port fix for issue46725 to transform.go + 2021-06-28 f99b3fe2ab [dev.typeparams] cmd/compile: move MethodValueWrapper to walk + 2021-06-28 a8861b907d [dev.typeparams] cmd/compile: port CL 330838 for -G=3 + 2021-06-27 20a04f6041 [dev.typeparams] cmd/compile: delay method value wrapper generation until walk + 2021-06-27 1b995f91a5 [dev.typeparams] cmd/compile: rename OCALLPART to OMETHVALUE + 2021-06-27 d44ed5d144 [dev.typeparams] cmd/compile: add method value wrappers to unified IR + 2021-06-26 3ea0fcfe15 [dev.typeparams] cmd/compile: do not skip TestUnifiedCompare in short mode + 2021-06-26 27e3b797bb [dev.typeparams] cmd/compile: remove OCALLMETH Fatals in SSA generation + 2021-06-26 0cf71f7f92 [dev.typeparams] cmd/compile: rewrite method calls during typecheck + 2021-06-26 180c338c68 [dev.typeparams] cmd/compile: restore check for OCALLMETH in walkCall + 2021-06-26 942bcc2d4f [dev.typeparams] cmd/compile: fix wrong AST generation in devirtualization + 2021-06-26 d417b8cf87 [dev.typeparams] cmd/compile: clarify comment about checking reflect.Method in usemethod + 2021-06-25 ed647b16d0 [dev.typeparams] cmd/compile: use Type.LinkString for map keys + 2021-06-25 942edc7502 [dev.typeparams] cmd/compile: rename types.Type.{Short,Long}String to {Link,Name}String + 2021-06-25 373ca3a846 Merge "[dev.typeparams] all: merge master (37f9a8f) into dev.typeparams" into dev.typeparams + 2021-06-25 1b60284c0a [dev.typeparams] cmd/compile: simplify variable capturing in unified IR + 2021-06-25 9fe7c38d3d [dev.typeparams] cmd/compile: fix TestUnifiedCompare + 2021-06-25 f4198f85d5 [dev.typeparams] cmd/compile: generate wrappers within unified IR + 2021-06-25 3f1a517a45 [dev.typeparams] cmd/compile: refactor "need to emit" logic for types + 2021-06-25 badb98364b [dev.typeparams] cmd/compile: switch CaptureVars to use syntax.Walk + 2021-06-25 ac2de11cfb [dev.typeparams] all: merge master (37f9a8f) into dev.typeparams + 2021-06-25 2493c72742 [dev.typeparams] cmd/compile: rewrite method call into method expression during escape analysis + 2021-06-25 f190a9280d [dev.typeparams] cmd/compile: simplify usemethod + 2021-06-25 aee209c044 [dev.typeparams] cmd/compile: catch another mis-used OCALLMETH in backend + 2021-06-24 75ad323773 [dev.typeparams] test: skip -G=3 testing under GOEXPERIMENT=unified + 2021-06-24 808dca3b2d [dev.typeparams] cmd/compile: suppress liveness diagnostics of wrappers + 2021-06-24 ddb09af1b8 [dev.typeparams] cmd/compile: add derived types and subdictionaries to dictionaries + 2021-06-24 df00abc61b [dev.typeparams] cmd/compile: skip escape analysis diagnostics for wrappers + 2021-06-24 b55cc6687d [dev.typeparams] cmd/compile: use r.hasTypeParams in typIdx + 2021-06-24 9bdbf73c98 [dev.typeparams] cmd/compile: simplify writer.collectDecls + 2021-06-23 ee4fc0c1bc [dev.typeparams] Fix issues related to dictionaries and method calls with embedded fields + 2021-06-23 8165256bc2 [dev.typeparams] cmd/compile/internal/syntax: go/ast-style walk API + 2021-06-23 a72a499c24 [dev.typeparams] cmd/compile: optimize wrapping of constant arguments + 
2021-06-23 eb691fdd62 [dev.typeparams] cmd/compile: escape analysis of method expression calls + 2021-06-23 0a0e3a3dea [dev.typeparams] cmd/compile: move call logic from order.go to escape + 2021-06-23 574ec1c645 [dev.typeparams] cmd/compile: desugar ORECOVER into ORECOVERFP + 2021-06-23 9be8303df9 [dev.typeparams] cmd/compile: add ORECOVERFP, OGETCALLER{PC,SP} ops + 2021-06-23 70f4ab6565 [dev.typeparams] cmd/compile: remove SetClosureCalled(false) hacks + 2021-06-23 107b1fce64 [dev.typeparams] cmd/compile: explain why expandInline needed + 2021-06-23 99732b9070 [dev.typeparams] cmd/compile: refactor escape analysis of calls + 2021-06-23 1a445dab66 [dev.typeparams] cmd/compile: remove CallExpr.PreserveClosure + 2021-06-23 e59a19cceb [dev.typeparams] cmd/compile: simplify walkGoDefer + 2021-06-23 493e177639 [dev.typeparams] cmd/compile: allow typecheck of OCHECKNIL + 2021-06-23 c4e0c652fb [dev.typeparams] cmd/compile: refactor CaptureName + 2021-06-22 62095c66e0 [dev.typeparams] go/types: adjust logic for method expression arg naming + 2021-06-22 541612b974 [dev.typeparams] cmd/gofmt: remove typeparams guards + 2021-06-22 3e6219c6a9 [dev.typeparams] cmd/compile: split package escape into multiple files + 2021-06-22 077100dfcd [dev.typeparams] cmd/compile: remove special escape analysis tags + 2021-06-22 859d903b06 [dev.typeparams] cmd/compile: add -d=unifiedquirks for quirks mode + 2021-06-22 d626ba27bb [dev.typeparams] all: merge master (16e82be) into dev.typeparams + 2021-06-21 844c076359 [dev.typeparams] cmd/compile: simplify import* functions + 2021-06-21 e57da8e53c [dev.typeparams] cmd/compile: explain why reader.funcExt need to set n.Defn + 2021-06-21 3f7f72a258 [dev.typeparams] cmd/compile: fold reader checking type params logic to separate method + 2021-06-20 d24c90a153 [dev.typeparams] cmd/compile: explain how pkgReader.typIdx handles alias cyclic + 2021-06-18 3f7a3133da [dev.typeparams] cmd/compile: add "toolstash -cmp"-like test of -d=unified + 2021-06-18 e9c01f9804 [dev.typeparams] cmd/compile: add missing copy of Field.Embedded in type substituter. 
+ 2021-06-18 6fa0437958 [dev.typeparams] cmd/compile: add documentation for unified IR pipeline + 2021-06-18 54fe57bc22 [dev.typeparams] cmd/compile: record writer's stack at export data sync points + 2021-06-18 78aa251ace [dev.typeparams] cmd/go: include new internal packages in TestNewReleaseRebuildsStalePackagesInGOPATH + 2021-06-18 2a7900762c [dev.typeparams] go/types: report better error for invalid untyped operation + 2021-06-18 90096f445e [dev.typeparams] cmd/compile/internal/syntax: convert (most) parser tests to new type set syntax + 2021-06-17 feec53c4e5 [dev.typeparams] cmd/compile: skip types2 GC test during bootstrapping + 2021-06-17 fb84d213a8 [dev.typeparams] reflect: support big endian architectures in callMethod + 2021-06-17 9f50d9a0b4 [dev.typeparams] internal/reflectlite: remove unused ptrSize + 2021-06-17 890a8407a9 [dev.typeparams] internal/reflectlite: use goarch.PtrSize instead of the duplicated ptrSize [generated] + 2021-06-17 bfd9b63f12 [dev.typeparams] reflect: delete unused ptrSize and PtrSize + 2021-06-17 95c104ee61 [dev.typeparams] reflect: use goarch.PtrSize instead of the duplicated ptrSize [generated] + 2021-06-17 2e600fb8b3 [dev.typeparams] runtime/internal/sys: remove unused Goarch* and Goos* constants + 2021-06-17 46e1e74a86 [dev.typeparams] runtime: replace Goarch* constants with internal/goarch versions [generated] + 2021-06-17 7b0e9cae66 [dev.typeparams] runtime: replace Goos* constants with internal/goos versions [generated] + 2021-06-17 81a6a4354b [dev.typeparams] internal/goarch,internal/goos: rename Goos and Goarch constants + 2021-06-17 33d1b82d16 [dev.typeparams] runtime/internal/sys: replace ArchFamily and constants with goarch + 2021-06-17 85b12a8563 [dev.typeparams] runtime,runtime/internal/sys: remove unused BigEndian + 2021-06-17 9a93072a07 [dev.typeparams] runtime/internal/sys: replace BigEndian with goarch.BigEndian [generated] + 2021-06-17 9c58e399a4 [dev.typeparams] runtime: fix import sort order [generated] + 2021-06-17 671954e72e [dev.typeparams] runtime/internal/sys: replace GOOS with goos.GOOS + 2021-06-17 5c028751bd [dev.typeparams] runtime/internal/sys: replace uses of GOARCH with goarch.GOARCH + 2021-06-17 6d89c90fb1 [dev.typeparams] runtime/internal/sys: remove unused PtrSize + 2021-06-17 6d85891b29 [dev.typeparams] runtime: replace uses of runtime/internal/sys.PtrSize with internal/goarch.PtrSize [generated] + 2021-06-17 122f5e16d6 [dev.typeparams] internal/goarch,internal/goos: explode runtime/internal/sys into pieces + 2021-06-17 804ecc2581 [dev.typeparams] all: add GOEXPERIMENT=unified knob + 2021-06-17 b14fd720a8 [dev.typeparams] cmd/compile: make types2 report better error for invalid untyped operation + 2021-06-17 8115ae198d [dev.typeparams] go/types: disallow ~T where T is a defined type or an interface + 2021-06-17 6237e441bc [dev.typeparams] go/types: disallow type list handling + 2021-06-17 6e50f4f111 [dev.typeparams] go/types: convert testdata/check tests to type set syntax + 2021-06-17 b6fc4d01a8 [dev.typeparams] go/types: convert testdata/fixedbugs tests to type set sytax + 2021-06-17 795f4475e5 [dev.typeparams] go/types: convert testdata/examples tests to type set sytax + 2021-06-17 8e14a9cf04 [dev.typeparams] go/types: eliminate need for unpack and asUnion functions + 2021-06-17 aecfd5c29e [dev.typeparams] go/types: clean up type set/union intersection + 2021-06-17 c7a460526e [dev.typeparams] go/types: replace Sum type with Union type + 2021-06-17 e7451f6616 [dev.typeparams] go/types: accept embedded interface 
elements + 2021-06-17 54f854fb41 [dev.typeparams] go/parser: accept embedded type literals + 2021-06-17 ab4b3c4b15 [dev.typeparams] go/parser: accept "~" and "|" interface elements + 2021-06-17 7c5d7a4caf [dev.typeparams] go/token, go/scanner: add the "~" operator + 2021-06-17 ad59efb027 [dev.typeparams] go/ast: remove the typeparams build constraint + 2021-06-16 1ba2074440 [dev.typeparams] cmd/compile/internal/types2: support local defined types + 2021-06-16 dd95a4e3db [dev.typeparams] cmd/compile: simplify SSA devirtualization + 2021-06-16 132ea56d29 [dev.typeparams] cmd/compile: fix crawling of embeddable types + 2021-06-16 8f95eaddd3 [dev.typeparams] cmd/compile: fix missing sync implicit types + 2021-06-16 a4121d7dd6 [dev.typeparams] Revert "[dev.typeparams] runtime: make deferproc take a func() argument" + 2021-06-16 4d6f9d60cf [dev.typeparams] all: merge master (785a8f6) into dev.typeparams + 2021-06-16 ee0420d3b5 [dev.typeparams] cmd/compile: factor out implicit/explicit handling + 2021-06-15 cf1ae5fc36 [dev.typeparams] cmd/compile: add -d=unified flag to enable unified IR + 2021-06-15 79cd1687e6 [dev.typeparams] cmd/compile: unified IR construction + 2021-06-14 ea438bda85 [dev.typeparams] all: merge master (fdab5be) into dev.typeparams + 2021-06-13 8eeaf961c5 [dev.typeparams] cmd/compile: move //go:embed -lang check to noder + 2021-06-12 f1b1c2f67f [dev.typeparams] cmd/compile: simplify NewClosureFunc + 2021-06-12 db7c868307 [dev.typeparams] test: add string quoting support to test/run.go + 2021-06-12 0132b91127 [dev.typeparams] cmd/compile: refactor closure construction + 2021-06-12 8f00eb0099 [dev.typeparams] cmd/compile: avoid ir.DeepCopy in noder.constDecl + 2021-06-12 2954f11ead [dev.typeparams] cmd/compile: scaffolding for export data experiments + 2021-06-11 c93d5d1a52 [dev.typeparams] all: always enable regabig on AMD64 + 2021-06-11 2fe324858b [dev.typeparams] internal/buildcfg: always enable regabiwrappers on AMD64 + 2021-06-11 e0e9fb8aff [dev.typeparams] runtime: simplify defer record allocation + 2021-06-11 4468e1cfb9 [dev.typeparams] runtime: allow newproc split stack + 2021-06-11 ef6c5be160 [dev.typeparams] cmd/compile: fix wrapper generation for imported generics + 2021-06-11 4a735ce068 [dev.typeparams] cmd/compile: add "check" field to noder.gcimports + 2021-06-11 61888d47c4 [dev.typeparams] cmd/compile: allow embedding Type.Vargen into Sym.Name + 2021-06-11 62e32dd386 [dev.typeparams] cmd/compile: extract SetBaseTypeIndex function + 2021-06-11 18788245ea [dev.typeparams] cmd/compile: add ir.TypeNodeAt + 2021-06-09 b20747334a [dev.typeparams] cmd/compile, runtime: simplify opendefer metadata + 2021-06-09 c0a86c10f1 [dev.typeparams] cmd/compile: simplify openDeferSave + 2021-06-08 74b0b2772a [dev.typeparams] cmd/compile, runtime: remove _defer.siz field + 2021-06-08 b80a4c56f0 [dev.typeparams] runtime: allow deferproc split stack + 2021-06-08 83da32749c [dev.typeparams] runtime: make deferproc take a func() argument + 2021-06-08 8e5304f729 [dev.typeparams] cmd/compile, runtime: remove the siz argument of newproc/deferproc + 2021-06-08 00d01b5786 [dev.typeparams] runtime: remove tracebackdefers + 2021-06-08 12b37b713f [dev.typeparams] runtime: remove variadic defer/go calls + 2021-06-08 5b350505da [dev.typeparams] cmd/compile: remove variadic defer calls + 2021-06-08 a9de78ac88 [dev.typeparams] cmd/compile, runtime: always enable defer/go wrapping + 2021-06-08 e58bddde70 [dev.typeparams] internal/goexperiment: regenerate generated files + 2021-06-08 0c40cb4a07 
[dev.typeparams] cmd/compile/internal/types2: provide valid signature in errors involving method expressions + 2021-06-07 74d46381b2 [dev.typeparams] cmd/compile: do extra markObjects during iexport to deal with generics + 2021-06-07 ccfb0ce8df [dev.typeparams] cmd/compile: convert generic values to interface type using dictionary + 2021-06-07 cf4b6dc48e [dev.typeparams] cmd/compile: allow conversions from type parameter to interface + 2021-06-07 bcb3927cb5 [dev.typeparams] cmd/compile: introduce IsTypeParam() helper + 2021-06-07 f0c97219a3 Merge "[dev.typeparams] all: merge master (8212707) into dev.typeparams" into dev.typeparams + 2021-06-07 201d55e637 [dev.typeparams] cmd/compile: create .dict Param in the package of the instantiated function + 2021-06-07 0e39cdc0e9 [dev.typeparams] all: merge master (8212707) into dev.typeparams + 2021-06-07 7c8a5be2d6 [dev.typeparams] go/types: factor out constraint satisfaction check + 2021-06-07 7497e57a39 [dev.typeparams] go/types: simplify Interface accessors + 2021-06-07 2f26adc232 [dev.typeparams] go/types: re-use existing code for Interface.Complete + 2021-06-07 1395952075 [dev.typeparams] go/types: add Named.SetTParams and Named.Orig methods + 2021-06-07 991dca0112 [dev.typeparams] go/types: move signature checking into separate file + 2021-06-06 c23294d6b3 [dev.typeparams] cmd/compile/internal/types2: return Universe for ((*Package)(nil)).Scope() + 2021-06-05 a5be3eaee2 [dev.typeparams] cmd/compile: refactor export writing + 2021-06-05 4c072c94dc [dev.typeparams] cmd/compile: refactor import reading + 2021-06-05 4e001a8d9e [dev.typeparams] runtime/race: make test compatible with types2 + 2021-06-05 246a5570be [dev.typeparams] cmd/compile: rename (types2.Inferred.)Targs to TArgs + 2021-06-05 692399fbaa [dev.typeparams] cmd/compile/internal/syntax: not all index expressions can be instantiated types + 2021-06-04 a94e4f5a85 [dev.typeparams] cmd/compile: point StructKeyExpr at the types.Field + 2021-06-04 bad388744b [dev.typeparams] cmd/compile: handle dictionaries for top-level instantiations + 2021-06-04 de61465156 [dev.typeparams] cmd/compile: allow inlining in instantiated functions + 2021-06-04 4cf7f5f694 [dev.typeparams] test: test regabidefers in live.go + 2021-06-04 3298c749ac [dev.typeparams] runtime: undo go'd closure argument workaround + 2021-06-04 46beeed0ac [dev.typeparams] cmd/compile: allow go'd closure to escape when compiling runtime + 2021-06-04 8e6dfe1b31 [dev.typeparams] cmd/compile: export/import of recursive generic types. 
+ 2021-06-04 93a886a165 [dev.typeparams] go/types: move struct checking into separate file + 2021-06-04 ffc74ad5d3 [dev.typeparams] go/types: move interface checking into separate file + 2021-06-04 090a17c998 [dev.typeparams] go/types: use correct type parameter list in missingMethod + 2021-06-04 62c40878e4 [dev.typeparams] go/types: better recv Var for method expressions + 2021-06-04 e32fab145b [dev.typeparams] go/types: fix panic with nil package name + 2021-06-04 cd6e9df446 [dev.typeparams] go/types: print "incomplete" for interfaces in debug mode only + 2021-06-04 655246f99a [dev.typeparams] go/types: make TestManual work for directories + 2021-06-04 d7592ab424 [dev.typeparams] go/types: implement types.Instantiate + 2021-06-04 410fa4c75b [dev.typeparams] go/types: rename Inferred.Targs to TArgs + 2021-06-04 298149a915 [dev.typeparams] go/types: use Checker-provided type parameter IDs when possible + 2021-06-04 2175e2f573 [dev.typeparams] cmd/compile: lazy import resolution for types2 + 2021-06-03 4d2b528795 [dev.typeparams] internal/buildcfg: turn on register ABI by default on ARM64 + 2021-06-03 5f034f9b46 [dev.typeparams] internal/buildcfg: turn on regabireflect by default on ARM64 + 2021-06-03 026480d06b [dev.typeparams] cmd/compile: allow nil Syms in Sym.Less + 2021-06-03 a2d6a2caeb [dev.typeparams] internal/buildcfg: turn on regabiwrappers by default on ARM64 + 2021-06-03 55b4310acd [dev.typeparams] runtime: crash the GC at clobberdead pointer on ARM64 + 2021-06-03 6b1e4430bb [dev.typeparams] cmd/compile: implement clobberdead mode on ARM64 + 2021-06-03 1c947e4f31 [dev.typeparams] cmd/compile: properly copy tilde value for unions in types2-to-types1 conversion + 2021-06-03 e9ba0750b6 [dev.typeparams] reflect: guard abi_test.go with regabiargs build tag + 2021-06-03 28bd325e41 [dev.typeparams] runtime: use ABIInternal callbackWrap in callbackasm1 on ARM64 + 2021-06-03 3de4986852 [dev.typeparams] runtime: call cgocallbackg indirectly on ARM64 + 2021-06-03 5a40fab19f [dev.typeparams] runtime, internal/bytealg: port performance-critical functions to register ABI on ARM64 + 2021-06-03 370ff5ff96 [dev.typeparams] test: update all the typeparam tests to use the new union/tilde syntax + 2021-06-03 5a008a92e8 [dev.typeparams] internal/bytealg: call memeqbody directly in memequal_varlen on ARM64 + 2021-06-03 165d39a1d4 [dev.typeparams] test: adjust codegen test for register ABI on ARM64 + 2021-06-03 b5f37faf3b [dev.typeparams] cmd/internal/goobj: add duffzero/duffcopy to builtin list + 2021-06-03 9c054f4137 [dev.typeparams] cmd/link: take function address in assembly in TestFuncAlign + 2021-06-03 95c618e99a [dev.typeparams] cmd/compile/internal/types2: add Config.AllowTypeLists to control type list handling + 2021-06-03 10d6b36ca3 [dev.typeparams] cmd/compile/internal/types2: disallow ~T where T is a defined type or an interface + 2021-06-02 8cdce85bdf [dev.typeparams] cmd/compile/internal/types2: convert testdata/check tests to type set sytax + 2021-06-02 c790964ae4 [dev.typeparams] cmd/compile/internal/types2: convert testdata/fixedbugs tests to type set sytax + 2021-06-02 9a99e728fe [dev.typeparams] cmd/compile/internal/types2: convert testdata/examples tests to type set sytax + 2021-06-02 d36b7d7bdd [dev.typeparams] cmd/compile/internal/importer: review of gcimporter_test.go + 2021-06-02 3c1d502a19 [dev.typeparams] cmd/compile/internal/types2: eliminate need for unpack and asUnion functions + 2021-06-02 848b58e473 [dev.typeparams] cmd/compile/internal/types2: clean up type set/union 
intersection + 2021-06-02 97cb0113a3 [dev.typeparams] cmd/compile: fix export/import of constants with typeparam type + 2021-06-02 6b1cdeaef3 [dev.typeparams] cmd/link: include "go build" output in test logs + 2021-06-02 c7b9811581 [dev.typeparams] cmd/compile/internal/importer: review of gcimporter.go + 2021-06-02 498a48327f [dev.typeparams] cmd/compile: sort iface fields before expansion + 2021-06-02 cc52fdd1f3 [dev.typeparams] cmd/compile/internal/importer: review of exportdata.go + 2021-06-02 8c5c5a9e69 [dev.typeparams] cmd/compile/internal/importer: review of support.go + 2021-06-02 589e32dbdf [dev.typeparams] cmd/compile/internal/types2: replace Sum type with Union type + 2021-06-02 7b876def6c [dev.typeparams] cmd/compile: add dictionary argument to generic functions + 2021-06-02 aa9cfdf775 [dev.typeparams] runtime: update ABIInternal assembly with register ABI on ARM64 + 2021-06-02 0c123cdf8b [dev.typeparams] reflect: implement register ABI for MakeFunc etc. on ARM64 + 2021-06-02 2e4b79949f [dev.typeparams] runtime: implement register ABI for reflectcall on ARM64 + 2021-06-02 dc2cb529a8 [dev.typeparams] runtime: mark assembly functions called directly from compiler ABIInternal + 2021-06-02 d2b435117d test: fix error check messages for 2 types2 tests + 2021-06-02 b1f48e8add [dev.typeparams] cmd/compile: fix formatting + 2021-06-01 58ad36b359 [dev.typeparams] internal/buildcfg: allow regabi GOEXPERIMENTs on ARM64 + 2021-06-01 c3639918d1 [dev.typeparams] internal/abi: define ARM64 register ABI constants + 2021-06-01 6633dc8b09 [dev.typeparams] reflect: call ABI0 spill/unspill functions on AMD64 + 2021-06-01 e4003463ff [dev.typeparams] cmd/compile: match register-ABI version of memmove call on ARM64 + 2021-06-01 8e7abefdaa [dev.typeparams] cmd/compile: update ARM64 CALL* ops for register ABI + 2021-06-01 c9d1a2bdd2 [dev.typeparams] all: merge master (2725522) into dev.typeparams + 2021-06-01 2580e9a160 [dev.typeparams] cmd/compile: refactor noder/irgen helpers + 2021-06-01 4b10e4c547 [dev.typeparams] cmd/compile: handle ONONAME in subster.node + 2021-05-31 f32f4f58d9 [dev.typeparams] cmd/compile: simplify formatting of defined types + 2021-05-27 22f5ece3b1 [dev.typeparams] cmd/compile/internal/noder: refactor irgen import handling + 2021-05-27 417955d151 [dev.typeparams] cmd/compile/internal/inline: refactor mkinlcall + 2021-05-27 88583a2a66 [dev.typeparams] test: trim list of expected -G=3 failures + 2021-05-27 ea522bc546 [dev.typeparams] cmd/compile: add and use ir.RawOrigExpr + 2021-05-27 de5d1aca5e [dev.typeparams] cmd/compile: tweaks to match types2 + 2021-05-27 c2c1b53b39 [dev.typeparams] cmd/compile: use old export format if not compiling with generics + 2021-05-27 8c99e5db43 [dev.typeparams] cmd/compile/internal/types2: ensure that Named.check is nilled out once it is expanded + 2021-05-27 963f33b03b [dev.typeparams] cmd/compile: enable register args on ARM64 + 2021-05-27 06df0ee7fa [dev.typeparams] cmd/compile: add arg/result register load/spill code on ARM64 + 2021-05-26 1ec056244e [dev.typeparams] cmd/compile: inlining tweaks for toolstash + 2021-05-26 6da1661371 [dev.typeparams] cmd/compile: simplify inlining variadic calls + 2021-05-26 e99e9a6e01 [dev.typeparams] cmd/compile: simplify ~r/~b naming + 2021-05-26 4c68edd1fe [dev.typeparams] cmd/compile: add morestack arg spilling code on ARM64 + 2021-05-26 a4b2a04bc5 [dev.typeparams] cmd/internal/obj/arm64: use ABI-compatible registers in function prologue + 2021-05-26 4bb927f82e [dev.typeparams] cmd/compile: define ARM64 
parameter registers + 2021-05-26 cf23daeda3 [dev.typeparams] cmd/compile: do not schedule in-register args late, even for block control + 2021-05-26 4ed6317e73 [dev.typeparams] cmd/compile: always generate (*T).M wrappers for instantiated methods + 2021-05-26 b7f7d1cd7b [dev.typeparams] cmd/compile: get type aliases working with generic types + 2021-05-26 95748d1b74 [dev.typeparams] cmd/compile: avoid some redundant type construction + 2021-05-26 fd54ae8b0c [dev.typeparams] cmd/compile: adding union support in types1 + 2021-05-25 6c9e1c58bc [dev.typeparams] test: fix and update run.go's generics testing + 2021-05-25 5c1e119d48 [dev.typeparams] all: merge master (f22ec51) into dev.typeparams + 2021-05-24 155dc0e541 [dev.typeparams] cmd/compile/internal/types2: factor out constraint satisfaction check + 2021-05-24 5770d7a637 [dev.typeparams] cmd/compile/internal/types2: accept embedded interface elements + 2021-05-24 cc7ceea585 [dev.typeparams] cmd/compile/internal/types2: simplify Interface accessors + 2021-05-24 1608577e05 [dev.typeparams] cmd/compile/internal/types2: re-use existing code for Interface.Complete + 2021-05-24 d48f6d9f6f [dev.typeparams] Don't check typecheck(3) on transform, so no need to export/import it + 2021-05-24 4c50721cda [dev.typeparams] cmd/compile: Fix handling of Name nodes during stenciling + 2021-05-24 dcaf785add [dev.typeparams] internal/buildcfg: enable defer/go wrapping everywhere + 2021-05-24 f642742678 [dev.typeparams] reflect: use internal/abi.FuncPCABI0 to take address of assembly functions + 2021-05-24 e0844acfc8 [dev.typeparams] runtime/pprof: replace funcPC with internal/abi.FuncPCABIInternal + 2021-05-24 ae26b45113 [dev.typeparams] cmd/compile/abi-internal.md: specify ARM64 register-based ABI + 2021-05-24 b18b2d372e [dev.typeparams] cmd/compile: fix case where we were copying a raw Node + 2021-05-21 5b1120fac7 [dev.typeparams] cmd/compile: fix handling of Nname field in (*subster).tstruct. 
+ 2021-05-21 8d2b4cb6cc [dev.typeparams] cmd/compile: fixing import of comm clauses/closures in generic functions + 2021-05-21 626e89c261 [dev.typeparams] runtime: replace funcPC with internal/abi.FuncPCABIInternal + 2021-05-21 6a81e063dd [dev.typeparams] runtime: fix misuse of funcPC + 2021-05-21 7d928460a1 [dev.typeparams] runtime: use internal/abi.FuncPCABI0 to reference ABI0 assembly symbols + 2021-05-21 0e0a1f94f3 [dev.typeparams] runtime: use ABI0 handler addresses on Windows/ARM64 + 2021-05-21 fb42fb705d [dev.typeparams] runtime: use internal/abi.FuncPCABI0 to take address of assembly functions + 2021-05-21 21db1d193c [dev.typeparams] runtime: fix newproc arg size on ARM + 2021-05-21 b1a398cf0f [dev.typeparams] cmd/compile: add import/export of calls to builtin functions + 2021-05-21 ccbfbb1c33 [dev.typeparams] cmd/compile: export OFUNCINST and OSELRECV2 nodes (for generic functions) + 2021-05-21 243076da64 [dev.typeparams] cmd/compile/internal/types2: move signature checking into separate file + 2021-05-21 cfe0250497 [dev.typeparams] cmd/compile/internal/types2: move struct checking into separate file + 2021-05-21 211244e172 [dev.typeparams] cmd/compile/internal/types2: move interface checking into separate file + 2021-05-21 7b3ee6102d [dev.typeparams] cmd/compile: move to new export version, keep reading previous version + 2021-05-21 15ad61aff5 [dev.typeparams] cmd/compile: get export/import of generic types & functions working + 2021-05-20 468efd5e2f [dev.typeparams] cmd/compile: change method instantiations back to being functions + 2021-05-20 382c5dd5f7 [dev.typeparams] internal/buildcfg: turn on register ABI on all AMD64 platforms + 2021-05-20 240d6d00ca [dev.typeparams] cmd/link: mangle symbol ABI name on Plan 9 + 2021-05-20 ed2001232a [dev.typeparams] runtime: use internal/abi.FuncPCABI0 for sigtramp PC on Plan 9 + 2021-05-20 02117775d1 [dev.typeparams] cmd/compile, runtime: do not zero X15 on Plan 9 + 2021-05-20 a5cd89b8c3 [dev.typeparams] runtime: use internal/abi.FuncPCABI0 and cgo_unsafe_args for Solaris syscall wrappers + 2021-05-19 6bdfff112f [dev.typeparams] cmd/compile/internal/types2: use correct type parameter list in missingMethod + 2021-05-19 eff66248ea [dev.typeparams] cmd/compile/internal/types2: implement package height + 2021-05-19 3f6f12972b [dev.typeparams] runtime: use internal/abi.FuncPCABI0 for sigtramp PC on DragonflyBSD + 2021-05-19 b69347d24a [dev.typeparams] cmd/compile: simplify tparam's type + 2021-05-19 701bd60646 [dev.typeparams] cmd/compile: simplify targ's type + 2021-05-19 c2966ae272 [dev.typeparams] cmd/compile/internal/ir: more position details in dump + 2021-05-19 fb79f6955e [dev.typeparams] cmd/compile/internal/importer: implement position reading + 2021-05-19 c92ae885d9 [dev.typeparams] cmd/compile/internal/types2: better recv Var for method expressions + 2021-05-19 90b6e72605 [dev.typeparams] cmd/compile/internal/types2: tweak anonymous parameter position + 2021-05-19 fc9e64cc98 [dev.typeparams] cmd/compile/internal/types2: fix types2 panic + 2021-05-19 c81562d99f [dev.typeparams] test: update regress tests for types2 + 2021-05-19 81b22480cf [dev.typeparams] cmd/compile/internal/syntax: accept embedded type literals + 2021-05-18 f3fc8b5779 [dev.typeparams] cmd/compile: simplify type alias handling for export + 2021-05-18 140cd7c1d3 [dev.typeparams] runtime: use internal/abi.FuncPCABI0 for syscall wrappers on OpenBSD + 2021-05-18 bbc0059b03 [dev.typeparams] test: run more tests with -G=3 + 2021-05-18 f208f1ac99 [dev.typeparams] 
cmd/compile/internal/ir: more useful Fatalfs + 2021-05-18 c7dd3e305d [dev.typeparams] all: merge master (690a8c3) into dev.typeparams + 2021-05-18 077f03f4d8 [dev.typeparams] runtime: use internal/abi.FuncPCABI0 for sigtramp PC on FreeBSD + 2021-05-17 f39200b037 [dev.typeparams] go/constant: implement Kind.String + 2021-05-14 0d1e293b23 [dev.typeparams] cmd/compile/internal/types2: print "incomplete" for interfaces in debug mode only + 2021-05-14 03ed590e51 [dev.typeparams] cmd/compile/internal/types2: use Checker-provided type parameter IDs when possible + 2021-05-13 c3fa51c9a2 cmd/compile: changed representation of typeparam bound in types1 + 2021-05-13 9daf3cca82 [dev.typeparams] cmd/compile: keep instantiated method as a method, rather than converting to function + 2021-05-12 04f65d394c [dev.typeparams] cmd/compile: fix use of method values with stenciled methods + 2021-05-11 d2b3efcb90 [dev.typeparams] all: merge master (9b84814) into dev.typeparams Change-Id: Idedf034141b432e69a19b076dcc10c2923f3ee7d
Diffstat (limited to 'src/cmd')
-rw-r--r--src/cmd/asm/internal/arch/arm64.go31
-rw-r--r--src/cmd/asm/internal/asm/testdata/arm64.s14
-rw-r--r--src/cmd/asm/internal/asm/testdata/arm64error.s6
-rw-r--r--src/cmd/cgo/out.go8
-rw-r--r--src/cmd/compile/abi-internal.md122
-rw-r--r--src/cmd/compile/internal/amd64/galign.go3
-rw-r--r--src/cmd/compile/internal/amd64/ggen.go10
-rw-r--r--src/cmd/compile/internal/amd64/ssa.go47
-rw-r--r--src/cmd/compile/internal/arm/galign.go1
-rw-r--r--src/cmd/compile/internal/arm64/galign.go3
-rw-r--r--src/cmd/compile/internal/arm64/ssa.go62
-rw-r--r--src/cmd/compile/internal/base/bootstrap_false.go12
-rw-r--r--src/cmd/compile/internal/base/bootstrap_true.go12
-rw-r--r--src/cmd/compile/internal/base/debug.go3
-rw-r--r--src/cmd/compile/internal/base/flag.go5
-rw-r--r--src/cmd/compile/internal/base/mapfile_mmap.go (renamed from src/cmd/compile/internal/typecheck/mapfile_mmap.go)4
-rw-r--r--src/cmd/compile/internal/base/mapfile_read.go (renamed from src/cmd/compile/internal/typecheck/mapfile_read.go)4
-rw-r--r--src/cmd/compile/internal/base/print.go21
-rw-r--r--src/cmd/compile/internal/deadcode/deadcode.go13
-rw-r--r--src/cmd/compile/internal/dwarfgen/dwarf.go6
-rw-r--r--src/cmd/compile/internal/dwarfgen/dwinl.go2
-rw-r--r--src/cmd/compile/internal/escape/assign.go120
-rw-r--r--src/cmd/compile/internal/escape/call.go428
-rw-r--r--src/cmd/compile/internal/escape/desugar.go37
-rw-r--r--src/cmd/compile/internal/escape/escape.go1772
-rw-r--r--src/cmd/compile/internal/escape/expr.go335
-rw-r--r--src/cmd/compile/internal/escape/graph.go324
-rw-r--r--src/cmd/compile/internal/escape/leaks.go106
-rw-r--r--src/cmd/compile/internal/escape/solve.go289
-rw-r--r--src/cmd/compile/internal/escape/stmt.go207
-rw-r--r--src/cmd/compile/internal/escape/utils.go215
-rw-r--r--src/cmd/compile/internal/gc/export.go116
-rw-r--r--src/cmd/compile/internal/gc/main.go43
-rw-r--r--src/cmd/compile/internal/gc/obj.go10
-rw-r--r--src/cmd/compile/internal/importer/exportdata.go1
-rw-r--r--src/cmd/compile/internal/importer/gcimporter.go3
-rw-r--r--src/cmd/compile/internal/importer/gcimporter_test.go23
-rw-r--r--src/cmd/compile/internal/importer/iimport.go279
-rw-r--r--src/cmd/compile/internal/importer/support.go4
-rw-r--r--src/cmd/compile/internal/inline/inl.go518
-rw-r--r--src/cmd/compile/internal/ir/expr.go135
-rw-r--r--src/cmd/compile/internal/ir/fmt.go45
-rw-r--r--src/cmd/compile/internal/ir/func.go142
-rw-r--r--src/cmd/compile/internal/ir/name.go69
-rw-r--r--src/cmd/compile/internal/ir/node.go41
-rw-r--r--src/cmd/compile/internal/ir/node_gen.go72
-rw-r--r--src/cmd/compile/internal/ir/op_string.go143
-rw-r--r--src/cmd/compile/internal/ir/package.go3
-rw-r--r--src/cmd/compile/internal/ir/scc.go2
-rw-r--r--src/cmd/compile/internal/ir/stmt.go2
-rw-r--r--src/cmd/compile/internal/ir/type.go27
-rw-r--r--src/cmd/compile/internal/ir/val.go2
-rw-r--r--src/cmd/compile/internal/liveness/plive.go4
-rw-r--r--src/cmd/compile/internal/logopt/logopt_test.go6
-rw-r--r--src/cmd/compile/internal/mips/galign.go1
-rw-r--r--src/cmd/compile/internal/mips64/galign.go1
-rw-r--r--src/cmd/compile/internal/noder/codes.go124
-rw-r--r--src/cmd/compile/internal/noder/decl.go170
-rw-r--r--src/cmd/compile/internal/noder/decoder.go301
-rw-r--r--src/cmd/compile/internal/noder/encoder.go284
-rw-r--r--src/cmd/compile/internal/noder/export.go65
-rw-r--r--src/cmd/compile/internal/noder/expr.go218
-rw-r--r--src/cmd/compile/internal/noder/frames_go1.go21
-rw-r--r--src/cmd/compile/internal/noder/frames_go17.go25
-rw-r--r--src/cmd/compile/internal/noder/helpers.go151
-rw-r--r--src/cmd/compile/internal/noder/import.go302
-rw-r--r--src/cmd/compile/internal/noder/irgen.go139
-rw-r--r--src/cmd/compile/internal/noder/linker.go296
-rw-r--r--src/cmd/compile/internal/noder/noder.go184
-rw-r--r--src/cmd/compile/internal/noder/object.go35
-rw-r--r--src/cmd/compile/internal/noder/posmap.go6
-rw-r--r--src/cmd/compile/internal/noder/quirks.go450
-rw-r--r--src/cmd/compile/internal/noder/reader.go2389
-rw-r--r--src/cmd/compile/internal/noder/reader2.go509
-rw-r--r--src/cmd/compile/internal/noder/reloc.go42
-rw-r--r--src/cmd/compile/internal/noder/stencil.go2131
-rw-r--r--src/cmd/compile/internal/noder/stmt.go32
-rw-r--r--src/cmd/compile/internal/noder/sync.go187
-rw-r--r--src/cmd/compile/internal/noder/syncmarker_string.go156
-rw-r--r--src/cmd/compile/internal/noder/transform.go183
-rw-r--r--src/cmd/compile/internal/noder/types.go222
-rw-r--r--src/cmd/compile/internal/noder/unified.go340
-rw-r--r--src/cmd/compile/internal/noder/unified_test.go153
-rw-r--r--src/cmd/compile/internal/noder/validate.go21
-rw-r--r--src/cmd/compile/internal/noder/writer.go1882
-rw-r--r--src/cmd/compile/internal/pkginit/initorder.go2
-rw-r--r--src/cmd/compile/internal/ppc64/galign.go1
-rw-r--r--src/cmd/compile/internal/ppc64/ggen.go27
-rw-r--r--src/cmd/compile/internal/reflectdata/alg.go6
-rw-r--r--src/cmd/compile/internal/reflectdata/reflect.go523
-rw-r--r--src/cmd/compile/internal/riscv64/galign.go1
-rw-r--r--src/cmd/compile/internal/riscv64/ssa.go36
-rw-r--r--src/cmd/compile/internal/s390x/galign.go1
-rw-r--r--src/cmd/compile/internal/ssa/config.go16
-rw-r--r--src/cmd/compile/internal/ssa/expand_calls.go14
-rw-r--r--src/cmd/compile/internal/ssa/export_test.go2
-rw-r--r--src/cmd/compile/internal/ssa/gen/AMD64.rules2
-rw-r--r--src/cmd/compile/internal/ssa/gen/ARM64.rules9
-rw-r--r--src/cmd/compile/internal/ssa/gen/ARM64Ops.go28
-rw-r--r--src/cmd/compile/internal/ssa/gen/PPC64Ops.go1
-rw-r--r--src/cmd/compile/internal/ssa/gen/RISCV64.rules12
-rw-r--r--src/cmd/compile/internal/ssa/gen/RISCV64Ops.go4
-rw-r--r--src/cmd/compile/internal/ssa/gen/dec64Ops.go1
-rw-r--r--src/cmd/compile/internal/ssa/gen/decOps.go1
-rw-r--r--src/cmd/compile/internal/ssa/opGen.go42
-rw-r--r--src/cmd/compile/internal/ssa/regalloc.go33
-rw-r--r--src/cmd/compile/internal/ssa/rewrite.go36
-rw-r--r--src/cmd/compile/internal/ssa/rewriteAMD64.go5
-rw-r--r--src/cmd/compile/internal/ssa/rewriteARM64.go30
-rw-r--r--src/cmd/compile/internal/ssa/rewriteRISCV64.go76
-rw-r--r--src/cmd/compile/internal/ssa/schedule.go2
-rw-r--r--src/cmd/compile/internal/ssa/softfloat.go1
-rw-r--r--src/cmd/compile/internal/ssa/writebarrier.go3
-rw-r--r--src/cmd/compile/internal/ssagen/abi.go3
-rw-r--r--src/cmd/compile/internal/ssagen/arch.go11
-rw-r--r--src/cmd/compile/internal/ssagen/pgen.go5
-rw-r--r--src/cmd/compile/internal/ssagen/ssa.go532
-rw-r--r--src/cmd/compile/internal/staticdata/data.go8
-rw-r--r--src/cmd/compile/internal/staticdata/embed.go7
-rw-r--r--src/cmd/compile/internal/staticinit/sched.go4
-rw-r--r--src/cmd/compile/internal/syntax/parser.go46
-rw-r--r--src/cmd/compile/internal/syntax/pos.go19
-rw-r--r--src/cmd/compile/internal/syntax/positions.go4
-rw-r--r--src/cmd/compile/internal/syntax/testdata/go2/linalg.go216
-rw-r--r--src/cmd/compile/internal/syntax/testdata/go2/smoketest.go26
-rw-r--r--src/cmd/compile/internal/syntax/testdata/go2/typeinst2.go210
-rw-r--r--src/cmd/compile/internal/syntax/testdata/go2/typeparams.go268
-rw-r--r--src/cmd/compile/internal/syntax/testdata/interface.go246
-rw-r--r--src/cmd/compile/internal/syntax/testdata/issue46558.src (renamed from src/cmd/gofmt/gofmt_typeparams_test.go)16
-rw-r--r--src/cmd/compile/internal/syntax/testdata/issue47704.go218
-rw-r--r--src/cmd/compile/internal/syntax/testdata/issue47704.src18
-rw-r--r--src/cmd/compile/internal/syntax/walk.go74
-rw-r--r--src/cmd/compile/internal/test/abiutils_test.go12
-rw-r--r--src/cmd/compile/internal/test/inl_test.go4
-rw-r--r--src/cmd/compile/internal/typecheck/bexport.go3
-rw-r--r--src/cmd/compile/internal/typecheck/builtin.go423
-rw-r--r--src/cmd/compile/internal/typecheck/builtin/runtime.go19
-rw-r--r--src/cmd/compile/internal/typecheck/const.go6
-rw-r--r--src/cmd/compile/internal/typecheck/crawler.go231
-rw-r--r--src/cmd/compile/internal/typecheck/dcl.go56
-rw-r--r--src/cmd/compile/internal/typecheck/export.go26
-rw-r--r--src/cmd/compile/internal/typecheck/expr.go159
-rw-r--r--src/cmd/compile/internal/typecheck/func.go243
-rw-r--r--src/cmd/compile/internal/typecheck/iexport.go340
-rw-r--r--src/cmd/compile/internal/typecheck/iimport.go562
-rw-r--r--src/cmd/compile/internal/typecheck/stmt.go41
-rw-r--r--src/cmd/compile/internal/typecheck/subr.go540
-rw-r--r--src/cmd/compile/internal/typecheck/syms.go4
-rw-r--r--src/cmd/compile/internal/typecheck/typecheck.go55
-rw-r--r--src/cmd/compile/internal/typecheck/universe.go15
-rw-r--r--src/cmd/compile/internal/types/fmt.go95
-rw-r--r--src/cmd/compile/internal/types/identity.go16
-rw-r--r--src/cmd/compile/internal/types/kind_string.go23
-rw-r--r--src/cmd/compile/internal/types/pkg.go4
-rw-r--r--src/cmd/compile/internal/types/size.go44
-rw-r--r--src/cmd/compile/internal/types/sizeof_test.go2
-rw-r--r--src/cmd/compile/internal/types/sort.go13
-rw-r--r--src/cmd/compile/internal/types/sym.go8
-rw-r--r--src/cmd/compile/internal/types/type.go202
-rw-r--r--src/cmd/compile/internal/types2/api.go24
-rw-r--r--src/cmd/compile/internal/types2/api_test.go208
-rw-r--r--src/cmd/compile/internal/types2/array.go25
-rw-r--r--src/cmd/compile/internal/types2/assignments.go2
-rw-r--r--src/cmd/compile/internal/types2/basic.go82
-rw-r--r--src/cmd/compile/internal/types2/builtins.go217
-rw-r--r--src/cmd/compile/internal/types2/builtins_test.go13
-rw-r--r--src/cmd/compile/internal/types2/call.go64
-rw-r--r--src/cmd/compile/internal/types2/chan.go35
-rw-r--r--src/cmd/compile/internal/types2/check.go20
-rw-r--r--src/cmd/compile/internal/types2/conversions.go6
-rw-r--r--src/cmd/compile/internal/types2/decl.go243
-rw-r--r--src/cmd/compile/internal/types2/errorcalls_test.go2
-rw-r--r--src/cmd/compile/internal/types2/errors.go6
-rw-r--r--src/cmd/compile/internal/types2/expr.go101
-rw-r--r--src/cmd/compile/internal/types2/index.go125
-rw-r--r--src/cmd/compile/internal/types2/infer.go98
-rw-r--r--src/cmd/compile/internal/types2/instantiate.go274
-rw-r--r--src/cmd/compile/internal/types2/interface.go226
-rw-r--r--src/cmd/compile/internal/types2/issues_test.go5
-rw-r--r--src/cmd/compile/internal/types2/labels.go3
-rw-r--r--src/cmd/compile/internal/types2/lookup.go118
-rw-r--r--src/cmd/compile/internal/types2/map.go24
-rw-r--r--src/cmd/compile/internal/types2/named.go295
-rw-r--r--src/cmd/compile/internal/types2/object.go80
-rw-r--r--src/cmd/compile/internal/types2/object_test.go2
-rw-r--r--src/cmd/compile/internal/types2/operand.go45
-rw-r--r--src/cmd/compile/internal/types2/package.go22
-rw-r--r--src/cmd/compile/internal/types2/pointer.go19
-rw-r--r--src/cmd/compile/internal/types2/predicates.go166
-rw-r--r--src/cmd/compile/internal/types2/resolver.go31
-rw-r--r--src/cmd/compile/internal/types2/resolver_test.go4
-rw-r--r--src/cmd/compile/internal/types2/sanitize.go202
-rw-r--r--src/cmd/compile/internal/types2/scope.go97
-rw-r--r--src/cmd/compile/internal/types2/signature.go393
-rw-r--r--src/cmd/compile/internal/types2/sizeof_test.go14
-rw-r--r--src/cmd/compile/internal/types2/sizes.go10
-rw-r--r--src/cmd/compile/internal/types2/slice.go19
-rw-r--r--src/cmd/compile/internal/types2/stmt.go82
-rw-r--r--src/cmd/compile/internal/types2/struct.go213
-rw-r--r--src/cmd/compile/internal/types2/subst.go372
-rw-r--r--src/cmd/compile/internal/types2/termlist.go167
-rw-r--r--src/cmd/compile/internal/types2/termlist_test.go313
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/builtins.go2230
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/builtins.src2
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/const0.src2
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/cycles4.src15
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/decls0.src10
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/decls1.src2
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/expr1.src4
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/expr2.src2
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/expr3.src2
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/issues.go248
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/issues.src12
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/linalg.go216
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/map2.go22
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/mtypeparams.go22
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/stmt0.src12
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/tinference.go258
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/typeinst2.go243
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/typeparams.go2251
-rw-r--r--src/cmd/compile/internal/types2/testdata/check/unions.go266
-rw-r--r--src/cmd/compile/internal/types2/testdata/examples/constraints.go291
-rw-r--r--src/cmd/compile/internal/types2/testdata/examples/functions.go230
-rw-r--r--src/cmd/compile/internal/types2/testdata/examples/inference.go210
-rw-r--r--src/cmd/compile/internal/types2/testdata/examples/methods.go217
-rw-r--r--src/cmd/compile/internal/types2/testdata/examples/operations.go229
-rw-r--r--src/cmd/compile/internal/types2/testdata/examples/types.go286
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue39634.go217
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue39680.go28
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue39693.go217
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue39699.go22
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue39711.go24
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue39723.go22
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue39725.go24
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue39755.go24
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue39938.go24
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue39948.go22
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue39976.go22
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue40038.go22
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue40056.go22
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue40301.go24
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue40684.go24
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue40789.go237
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue41124.go218
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue42758.go22
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue43671.go258
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue45548.go22
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue45635.go25
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue45639.go212
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue45985.go22
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue46090.go29
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue46275.go226
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue46583.src28
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue47031.go220
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue47115.go240
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue47127.go237
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue47411.go226
-rw-r--r--src/cmd/compile/internal/types2/testdata/fixedbugs/issue47747.go268
-rw-r--r--src/cmd/compile/internal/types2/tuple.go36
-rw-r--r--src/cmd/compile/internal/types2/type.go930
-rw-r--r--src/cmd/compile/internal/types2/typelists.go69
-rw-r--r--src/cmd/compile/internal/types2/typeparam.go108
-rw-r--r--src/cmd/compile/internal/types2/types_test.go9
-rw-r--r--src/cmd/compile/internal/types2/typeset.go392
-rw-r--r--src/cmd/compile/internal/types2/typeset_test.go15
-rw-r--r--src/cmd/compile/internal/types2/typestring.go219
-rw-r--r--src/cmd/compile/internal/types2/typestring_test.go57
-rw-r--r--src/cmd/compile/internal/types2/typeterm.go166
-rw-r--r--src/cmd/compile/internal/types2/typeterm_test.go239
-rw-r--r--src/cmd/compile/internal/types2/typexpr.go846
-rw-r--r--src/cmd/compile/internal/types2/unify.go81
-rw-r--r--src/cmd/compile/internal/types2/union.go150
-rw-r--r--src/cmd/compile/internal/types2/universe.go81
-rw-r--r--src/cmd/compile/internal/walk/assign.go1
-rw-r--r--src/cmd/compile/internal/walk/builtin.go13
-rw-r--r--src/cmd/compile/internal/walk/closure.go96
-rw-r--r--src/cmd/compile/internal/walk/complit.go8
-rw-r--r--src/cmd/compile/internal/walk/convert.go296
-rw-r--r--src/cmd/compile/internal/walk/expr.go123
-rw-r--r--src/cmd/compile/internal/walk/order.go460
-rw-r--r--src/cmd/compile/internal/walk/stmt.go150
-rw-r--r--src/cmd/compile/internal/walk/switch.go45
-rw-r--r--src/cmd/compile/internal/walk/walk.go9
-rw-r--r--src/cmd/compile/internal/wasm/ssa.go7
-rw-r--r--src/cmd/compile/internal/x86/galign.go1
-rw-r--r--src/cmd/dist/build.go12
-rw-r--r--src/cmd/dist/test.go13
-rw-r--r--src/cmd/go.mod4
-rw-r--r--src/cmd/go.sum27
-rw-r--r--src/cmd/go/alldocs.go27
-rw-r--r--src/cmd/go/go_test.go2
-rw-r--r--src/cmd/go/internal/modcmd/vendor.go2
-rw-r--r--src/cmd/go/internal/modfetch/codehost/git.go131
-rw-r--r--src/cmd/go/internal/modload/buildlist.go19
-rw-r--r--src/cmd/go/internal/modload/init.go95
-rw-r--r--src/cmd/go/internal/test/test.go28
-rw-r--r--src/cmd/go/internal/test/testflag.go60
-rw-r--r--src/cmd/go/internal/work/gc.go24
-rw-r--r--src/cmd/go/internal/work/init.go4
-rw-r--r--src/cmd/go/testdata/mod/example.com_split-incompatible_v2.0.0+incompatible.txt2
-rw-r--r--src/cmd/go/testdata/mod/example.com_split-incompatible_v2.1.0-pre+incompatible.txt2
-rw-r--r--src/cmd/go/testdata/script/build_negative_p.txt5
-rw-r--r--src/cmd/go/testdata/script/mod_init_invalid_major.txt82
-rw-r--r--src/cmd/go/testdata/script/mod_lazy_import_allmod.txt2
-rw-r--r--src/cmd/go/testdata/script/mod_tidy_compat.txt2
-rw-r--r--src/cmd/go/testdata/script/mod_tidy_lazy_self.txt17
-rw-r--r--src/cmd/go/testdata/script/mod_vendor_goversion.txt2
-rw-r--r--src/cmd/go/testdata/script/mod_vendor_redundant_requirement.txt29
-rw-r--r--src/cmd/go/testdata/script/test_cache_inputs.txt6
-rw-r--r--src/cmd/go/testdata/script/test_vet.txt20
-rw-r--r--src/cmd/go/testdata/script/work.txt6
-rw-r--r--src/cmd/go/testdata/script/work_edit.txt8
-rw-r--r--src/cmd/gofmt/gofmt_test.go7
-rw-r--r--src/cmd/gofmt/testdata/typeparams.golden2
-rw-r--r--src/cmd/gofmt/testdata/typeparams.input2
-rw-r--r--src/cmd/internal/buildid/buildid_test.go8
-rw-r--r--src/cmd/internal/buildid/rewrite.go3
-rw-r--r--src/cmd/internal/dwarf/dwarf.go13
-rw-r--r--src/cmd/internal/goobj/builtinlist.go7
-rw-r--r--src/cmd/internal/goobj/mkbuiltin.go4
-rw-r--r--src/cmd/internal/obj/arm/asm5.go11
-rw-r--r--src/cmd/internal/obj/arm64/asm7.go58
-rw-r--r--src/cmd/internal/obj/arm64/obj7.go104
-rw-r--r--src/cmd/internal/obj/objfile.go5
-rw-r--r--src/cmd/internal/obj/textflag.go4
-rw-r--r--src/cmd/internal/obj/wasm/wasmobj.go36
-rw-r--r--src/cmd/internal/obj/x86/asm6.go1
-rw-r--r--src/cmd/internal/obj/x86/obj6.go4
-rw-r--r--src/cmd/internal/objabi/funcid.go3
-rw-r--r--src/cmd/link/internal/ld/deadcode.go3
-rw-r--r--src/cmd/link/internal/ld/dwarf.go2
-rw-r--r--src/cmd/link/internal/ld/dwarf_test.go7
-rw-r--r--src/cmd/link/internal/ld/elf.go21
-rw-r--r--src/cmd/link/internal/ld/lib.go4
-rw-r--r--src/cmd/link/internal/ld/pcln.go9
-rw-r--r--src/cmd/link/internal/ld/pe.go2
-rw-r--r--src/cmd/link/internal/ld/symtab.go1
-rw-r--r--src/cmd/link/internal/loadelf/ldelf.go2
-rw-r--r--src/cmd/link/internal/loadmacho/ldmacho.go2
-rw-r--r--src/cmd/link/link_test.go10
-rw-r--r--src/cmd/trace/annotations.go5
-rw-r--r--src/cmd/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go37
-rw-r--r--src/cmd/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go6
-rw-r--r--src/cmd/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go2
-rw-r--r--src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/input.go37
-rw-r--r--src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/matcher.go23
-rw-r--r--src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/symbol.go224
-rw-r--r--src/cmd/vendor/golang.org/x/tools/internal/typeparams/common.go (renamed from src/cmd/vendor/golang.org/x/tools/internal/typeparams/doc.go)14
-rw-r--r--src/cmd/vendor/golang.org/x/tools/internal/typeparams/notypeparams.go39
-rw-r--r--src/cmd/vendor/golang.org/x/tools/internal/typeparams/typeparams.go60
-rw-r--r--src/cmd/vendor/modules.txt2
-rw-r--r--src/cmd/vet/testdata/print/print.go8
352 files changed, 23958 insertions, 9953 deletions
diff --git a/src/cmd/asm/internal/arch/arm64.go b/src/cmd/asm/internal/arch/arm64.go
index 40d828a1fe..24689c5ab1 100644
--- a/src/cmd/asm/internal/arch/arm64.go
+++ b/src/cmd/asm/internal/arch/arm64.go
@@ -165,27 +165,21 @@ func ARM64RegisterExtension(a *obj.Addr, ext string, reg, num int16, isAmount, i
}
}
if reg <= arm64.REG_R31 && reg >= arm64.REG_R0 {
+ if !isAmount {
+ return errors.New("invalid register extension")
+ }
switch ext {
case "UXTB":
- if !isAmount {
- return errors.New("invalid register extension")
- }
if a.Type == obj.TYPE_MEM {
return errors.New("invalid shift for the register offset addressing mode")
}
a.Reg = arm64.REG_UXTB + Rnum
case "UXTH":
- if !isAmount {
- return errors.New("invalid register extension")
- }
if a.Type == obj.TYPE_MEM {
return errors.New("invalid shift for the register offset addressing mode")
}
a.Reg = arm64.REG_UXTH + Rnum
case "UXTW":
- if !isAmount {
- return errors.New("invalid register extension")
- }
// effective address of memory is a base register value and an offset register value.
if a.Type == obj.TYPE_MEM {
a.Index = arm64.REG_UXTW + Rnum
@@ -193,48 +187,33 @@ func ARM64RegisterExtension(a *obj.Addr, ext string, reg, num int16, isAmount, i
a.Reg = arm64.REG_UXTW + Rnum
}
case "UXTX":
- if !isAmount {
- return errors.New("invalid register extension")
- }
if a.Type == obj.TYPE_MEM {
return errors.New("invalid shift for the register offset addressing mode")
}
a.Reg = arm64.REG_UXTX + Rnum
case "SXTB":
- if !isAmount {
- return errors.New("invalid register extension")
+ if a.Type == obj.TYPE_MEM {
+ return errors.New("invalid shift for the register offset addressing mode")
}
a.Reg = arm64.REG_SXTB + Rnum
case "SXTH":
- if !isAmount {
- return errors.New("invalid register extension")
- }
if a.Type == obj.TYPE_MEM {
return errors.New("invalid shift for the register offset addressing mode")
}
a.Reg = arm64.REG_SXTH + Rnum
case "SXTW":
- if !isAmount {
- return errors.New("invalid register extension")
- }
if a.Type == obj.TYPE_MEM {
a.Index = arm64.REG_SXTW + Rnum
} else {
a.Reg = arm64.REG_SXTW + Rnum
}
case "SXTX":
- if !isAmount {
- return errors.New("invalid register extension")
- }
if a.Type == obj.TYPE_MEM {
a.Index = arm64.REG_SXTX + Rnum
} else {
a.Reg = arm64.REG_SXTX + Rnum
}
case "LSL":
- if !isAmount {
- return errors.New("invalid register extension")
- }
a.Index = arm64.REG_LSL + Rnum
default:
return errors.New("unsupported general register extension type: " + ext)
diff --git a/src/cmd/asm/internal/asm/testdata/arm64.s b/src/cmd/asm/internal/asm/testdata/arm64.s
index d8a20edfc1..a4b56b0696 100644
--- a/src/cmd/asm/internal/asm/testdata/arm64.s
+++ b/src/cmd/asm/internal/asm/testdata/arm64.s
@@ -334,6 +334,8 @@ TEXT foo(SB), DUPOK|NOSPLIT, $-8
EONW $0x6006000060060, R5 // EONW $1689262177517664, R5 // 1b0c8052db00a072a5003b4a
ORNW $0x6006000060060, R5 // ORNW $1689262177517664, R5 // 1b0c8052db00a072a5003b2a
BICSW $0x6006000060060, R5 // BICSW $1689262177517664, R5 // 1b0c8052db00a072a5003b6a
+ AND $1, ZR // fb0340b2ff031b8a
+ ANDW $1, ZR // fb030032ff031b0a
// TODO: this could have better encoding
ANDW $-1, R10 // 1b0080124a011b0a
AND $8, R0, RSP // 1f007d92
@@ -369,9 +371,9 @@ TEXT foo(SB), DUPOK|NOSPLIT, $-8
MOVD $-1, R1 // 01008092
MOVD $0x210000, R0 // MOVD $2162688, R0 // 2004a0d2
MOVD $0xffffffffffffaaaa, R1 // MOVD $-21846, R1 // a1aa8a92
- MOVW $1, ZR
+ MOVW $1, ZR // 3f008052
MOVW $1, R1
- MOVD $1, ZR
+ MOVD $1, ZR // 3f0080d2
MOVD $1, R1
MOVK $1, R1
MOVD $0x1000100010001000, RSP // MOVD $1152939097061330944, RSP // ff8304b2
@@ -386,10 +388,10 @@ TEXT foo(SB), DUPOK|NOSPLIT, $-8
VMOVQ $0x8040201008040202, $0x7040201008040201, V20 // VMOVQ $-9205322385119247870, $8088500183983456769, V20
// mov(to/from sp)
- MOVD $0x1002(RSP), R1 // MOVD $4098(RSP), R1 // fb074091610b0091
- MOVD $0x1708(RSP), RSP // MOVD $5896(RSP), RSP // fb0740917f231c91
- MOVD $0x2001(R7), R1 // MOVD $8193(R7), R1 // fb08409161070091
- MOVD $0xffffff(R7), R1 // MOVD $16777215(R7), R1 // fbfc7f9161ff3f91
+ MOVD $0x1002(RSP), R1 // MOVD $4098(RSP), R1 // e107409121080091
+ MOVD $0x1708(RSP), RSP // MOVD $5896(RSP), RSP // ff074091ff231c91
+ MOVD $0x2001(R7), R1 // MOVD $8193(R7), R1 // e108409121040091
+ MOVD $0xffffff(R7), R1 // MOVD $16777215(R7), R1 // e1fc7f9121fc3f91
MOVD $-0x1(R7), R1 // MOVD $-1(R7), R1 // e10400d1
MOVD $-0x30(R7), R1 // MOVD $-48(R7), R1 // e1c000d1
MOVD $-0x708(R7), R1 // MOVD $-1800(R7), R1 // e1201cd1
diff --git a/src/cmd/asm/internal/asm/testdata/arm64error.s b/src/cmd/asm/internal/asm/testdata/arm64error.s
index cf57179e43..8b12b16680 100644
--- a/src/cmd/asm/internal/asm/testdata/arm64error.s
+++ b/src/cmd/asm/internal/asm/testdata/arm64error.s
@@ -3,7 +3,7 @@
// license that can be found in the LICENSE file.
TEXT errors(SB),$0
- AND $1, RSP // ERROR "illegal combination"
+ AND $1, RSP // ERROR "illegal source register"
ANDS $1, R0, RSP // ERROR "illegal combination"
ADDSW R7->32, R14, R13 // ERROR "shift amount out of range 0 to 31"
ADD R1.UXTB<<5, R2, R3 // ERROR "shift amount out of range 0 to 4"
@@ -419,4 +419,8 @@ TEXT errors(SB),$0
ADD R1>>2, RSP, R3 // ERROR "illegal combination"
ADDS R2<<3, R3, RSP // ERROR "unexpected SP reference"
CMP R1<<5, RSP // ERROR "the left shift amount out of range 0 to 4"
+ MOVD.P y+8(FP), R1 // ERROR "illegal combination"
+ MOVD.W x-8(SP), R1 // ERROR "illegal combination"
+ LDP.P x+8(FP), (R0, R1) // ERROR "illegal combination"
+ LDP.W x+8(SP), (R0, R1) // ERROR "illegal combination"
RET
diff --git a/src/cmd/cgo/out.go b/src/cmd/cgo/out.go
index 94152f4278..3badd73f79 100644
--- a/src/cmd/cgo/out.go
+++ b/src/cmd/cgo/out.go
@@ -59,9 +59,9 @@ func (p *Package) writeDefs() {
// Write C main file for using gcc to resolve imports.
fmt.Fprintf(fm, "int main() { return 0; }\n")
if *importRuntimeCgo {
- fmt.Fprintf(fm, "void crosscall2(void(*fn)(void*), void *a, int c, __SIZE_TYPE__ ctxt) { }\n")
+ fmt.Fprintf(fm, "void crosscall2(void(*fn)(void*) __attribute__((unused)), void *a __attribute__((unused)), int c __attribute__((unused)), __SIZE_TYPE__ ctxt __attribute__((unused))) { }\n")
fmt.Fprintf(fm, "__SIZE_TYPE__ _cgo_wait_runtime_init_done(void) { return 0; }\n")
- fmt.Fprintf(fm, "void _cgo_release_context(__SIZE_TYPE__ ctxt) { }\n")
+ fmt.Fprintf(fm, "void _cgo_release_context(__SIZE_TYPE__ ctxt __attribute__((unused))) { }\n")
fmt.Fprintf(fm, "char* _cgo_topofstack(void) { return (char*)0; }\n")
} else {
// If we're not importing runtime/cgo, we *are* runtime/cgo,
@@ -70,8 +70,8 @@ func (p *Package) writeDefs() {
fmt.Fprintf(fm, "__SIZE_TYPE__ _cgo_wait_runtime_init_done(void);\n")
fmt.Fprintf(fm, "void _cgo_release_context(__SIZE_TYPE__);\n")
}
- fmt.Fprintf(fm, "void _cgo_allocate(void *a, int c) { }\n")
- fmt.Fprintf(fm, "void _cgo_panic(void *a, int c) { }\n")
+ fmt.Fprintf(fm, "void _cgo_allocate(void *a __attribute__((unused)), int c __attribute__((unused))) { }\n")
+ fmt.Fprintf(fm, "void _cgo_panic(void *a __attribute__((unused)), int c __attribute__((unused))) { }\n")
fmt.Fprintf(fm, "void _cgo_reginit(void) { }\n")
// Write second Go output: definitions of _C_xxx.
diff --git a/src/cmd/compile/abi-internal.md b/src/cmd/compile/abi-internal.md
index 2bb4055083..3619aea4aa 100644
--- a/src/cmd/compile/abi-internal.md
+++ b/src/cmd/compile/abi-internal.md
@@ -505,6 +505,128 @@ control bits specified by the ELF AMD64 ABI.
The x87 floating-point control word is not used by Go on amd64.
+### arm64 architecture
+
+The arm64 architecture uses R0 – R15 for integer arguments and results.
+
+It uses F0 – F15 for floating-point arguments and results.
+
+*Rationale*: 16 integer registers and 16 floating-point registers are
+more than enough for passing arguments and results for practically all
+functions (see Appendix). While there are more registers available,
+using more registers provides little benefit. Additionally, it will add
+overhead on code paths where the number of arguments are not statically
+known (e.g. reflect call), and will consume more stack space when there
+is only limited stack space available to fit in the nosplit limit.
+
+Registers R16 and R17 are permanent scratch registers. They are also
+used as scratch registers by the linker (Go linker and external
+linker) in trampolines.
+
+Register R18 is reserved and never used. It is reserved for the OS
+on some platforms (e.g. macOS).
+
+Registers R19 – R25 are permanent scratch registers. In addition,
+R27 is a permanent scratch register used by the assembler when
+expanding instructions.
+
+Floating-point registers F16 – F31 are also permanent scratch
+registers.
+
+Special-purpose registers are as follows:
+
+| Register | Call meaning | Return meaning | Body meaning |
+| --- | --- | --- | --- |
+| RSP | Stack pointer | Same | Same |
+| R30 | Link register | Same | Scratch (non-leaf functions) |
+| R29 | Frame pointer | Same | Same |
+| R28 | Current goroutine | Same | Same |
+| R27 | Scratch | Scratch | Scratch |
+| R26 | Closure context pointer | Scratch | Scratch |
+| R18 | Reserved (not used) | Same | Same |
+| ZR | Zero value | Same | Same |
+
+*Rationale*: These register meanings are compatible with Go’s
+stack-based calling convention.
+
+*Rationale*: The link register, R30, holds the function return
+address at the function entry. For functions that have frames
+(including most non-leaf functions), R30 is saved to stack in the
+function prologue and restored in the epilogue. Within the function
+body, R30 can be used as a scratch register.
+
+*Implementation note*: Registers with fixed meaning at calls but not
+in function bodies must be initialized by "injected" calls such as
+signal-based panics.
+
+#### Stack layout
+
+The stack pointer, RSP, grows down and is always aligned to 16 bytes.
+
+*Rationale*: The arm64 architecture requires the stack pointer to be
+16-byte aligned.
+
+A function's stack frame, after the frame is created, is laid out as
+follows:
+
+    +------------------------------+
+    | ... locals ...               |
+    | ... outgoing arguments ...   |
+    | return PC                    | ← RSP points to
+    | frame pointer on entry       |
+    +------------------------------+ ↓ lower addresses
+
+The "return PC" is loaded to the link register, R30, as part of the
+arm64 `CALL` operation.
+
+On entry, a function subtracts from RSP to open its stack frame, and
+saves the values of R30 and R29 at the bottom of the frame.
+Specifically, R30 is saved at 0(RSP) and R29 is saved at -8(RSP),
+after RSP is updated.
+
+A leaf function that does not require any stack space may omit the
+saved R30 and R29.
+
+The Go ABI's use of R29 as a frame pointer register is compatible with
+arm64 architecture requirement so that Go can inter-operate with platform
+debuggers and profilers.
+
+This stack layout is used by both register-based (ABIInternal) and
+stack-based (ABI0) calling conventions.
+
+#### Flags
+
+The arithmetic status flags (NZCV) are treated like scratch registers
+and not preserved across calls.
+All other bits in PSTATE are system flags and are not modified by Go.
+
+The floating-point status register (FPSR) is treated like scratch
+registers and not preserved across calls.
+
+At calls, the floating-point control register (FPCR) bits are always
+set as follows:
+
+| Flag | Bit | Value | Meaning |
+| --- | --- | --- | --- |
+| DN | 25 | 0 | Propagate NaN operands |
+| FZ | 24 | 0 | Do not flush to zero |
+| RC | 23/22 | 0 (RN) | Round to nearest, choose even if tied |
+| IDE | 15 | 0 | Denormal operations trap disabled |
+| IXE | 12 | 0 | Inexact trap disabled |
+| UFE | 11 | 0 | Underflow trap disabled |
+| OFE | 10 | 0 | Overflow trap disabled |
+| DZE | 9 | 0 | Divide-by-zero trap disabled |
+| IOE | 8 | 0 | Invalid operations trap disabled |
+| NEP | 2 | 0 | Scalar operations do not affect higher elements in vector registers |
+| AH | 1 | 0 | No alternate handling of de-normal inputs |
+| FIZ | 0 | 0 | Do not zero de-normals |
+
+*Rationale*: Having a fixed FPCR control configuration allows Go
+functions to use floating-point and vector (SIMD) operations without
+modifying or saving the FPCR.
+Functions are allowed to modify it between calls (as long as they
+restore it), but as of this writing Go code never does.
+
## Future directions
### Spill path improvements
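To make the register assignment described in the arm64 section above concrete, here is a small hedged sketch in ordinary Go (not generated output, and not part of the patch). The comment spells out how the arguments and results of one function would be passed if the "R0 – R15 for integer, F0 – F15 for floating-point" rule applies and register arguments are enabled on arm64; the function and names are purely illustrative.

    package main

    import "fmt"

    // Under the convention described above, scale's arguments would arrive
    // as p in R0, n in R1, and f in F0; the int result would come back in
    // R0 and the float64 result in F0. (Illustrative only.)
    func scale(p *int, n int, f float64) (int, float64) {
    	return *p + n, f * 2
    }

    func main() {
    	x := 3
    	i, g := scale(&x, 4, 1.5)
    	fmt.Println(i, g)
    }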
diff --git a/src/cmd/compile/internal/amd64/galign.go b/src/cmd/compile/internal/amd64/galign.go
index 2785aa0336..ca44263afc 100644
--- a/src/cmd/compile/internal/amd64/galign.go
+++ b/src/cmd/compile/internal/amd64/galign.go
@@ -18,11 +18,10 @@ func Init(arch *ssagen.ArchInfo) {
arch.ZeroRange = zerorange
arch.Ginsnop = ginsnop
- arch.Ginsnopdefer = ginsnop
arch.SSAMarkMoves = ssaMarkMoves
arch.SSAGenValue = ssaGenValue
arch.SSAGenBlock = ssaGenBlock
- arch.LoadRegResults = loadRegResults
+ arch.LoadRegResult = loadRegResult
arch.SpillArgReg = spillArgReg
}
diff --git a/src/cmd/compile/internal/amd64/ggen.go b/src/cmd/compile/internal/amd64/ggen.go
index 1484ad5404..b8dce81a92 100644
--- a/src/cmd/compile/internal/amd64/ggen.go
+++ b/src/cmd/compile/internal/amd64/ggen.go
@@ -57,7 +57,6 @@ func dzDI(b int64) int64 {
func zerorange(pp *objw.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Prog {
const (
r13 = 1 << iota // if R13 is already zeroed.
- x15 // if X15 is already zeroed. Note: in new ABI, X15 is always zero.
)
if cnt == 0 {
@@ -85,11 +84,6 @@ func zerorange(pp *objw.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.
}
p = pp.Append(p, x86.AMOVQ, obj.TYPE_REG, x86.REG_R13, 0, obj.TYPE_MEM, x86.REG_SP, off)
} else if !isPlan9 && cnt <= int64(8*types.RegSize) {
- if !buildcfg.Experiment.RegabiG && *state&x15 == 0 {
- p = pp.Append(p, x86.AXORPS, obj.TYPE_REG, x86.REG_X15, 0, obj.TYPE_REG, x86.REG_X15, 0)
- *state |= x15
- }
-
for i := int64(0); i < cnt/16; i++ {
p = pp.Append(p, x86.AMOVUPS, obj.TYPE_REG, x86.REG_X15, 0, obj.TYPE_MEM, x86.REG_SP, off+i*16)
}
@@ -98,10 +92,6 @@ func zerorange(pp *objw.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.
p = pp.Append(p, x86.AMOVUPS, obj.TYPE_REG, x86.REG_X15, 0, obj.TYPE_MEM, x86.REG_SP, off+cnt-int64(16))
}
} else if !isPlan9 && (cnt <= int64(128*types.RegSize)) {
- if !buildcfg.Experiment.RegabiG && *state&x15 == 0 {
- p = pp.Append(p, x86.AXORPS, obj.TYPE_REG, x86.REG_X15, 0, obj.TYPE_REG, x86.REG_X15, 0)
- *state |= x15
- }
// Save DI to r12. With the amd64 Go register abi, DI can contain
// an incoming parameter, whereas R12 is always scratch.
p = pp.Append(p, x86.AMOVQ, obj.TYPE_REG, x86.REG_DI, 0, obj.TYPE_REG, x86.REG_R12, 0)
diff --git a/src/cmd/compile/internal/amd64/ssa.go b/src/cmd/compile/internal/amd64/ssa.go
index ca5f36e775..30dba057d0 100644
--- a/src/cmd/compile/internal/amd64/ssa.go
+++ b/src/cmd/compile/internal/amd64/ssa.go
@@ -823,7 +823,7 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
p.To.Reg = v.Args[0].Reg()
ssagen.AddAux2(&p.To, v, sc.Off64())
case ssa.OpAMD64MOVOstorezero:
- if !buildcfg.Experiment.RegabiG || s.ABI != obj.ABIInternal {
+ if s.ABI != obj.ABIInternal {
// zero X15 manually
opregreg(s, x86.AXORPS, x86.REG_X15, x86.REG_X15)
}
@@ -914,7 +914,7 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
p.To.Type = obj.TYPE_REG
p.To.Reg = v.Reg()
case ssa.OpAMD64DUFFZERO:
- if !buildcfg.Experiment.RegabiG || s.ABI != obj.ABIInternal {
+ if s.ABI != obj.ABIInternal {
// zero X15 manually
opregreg(s, x86.AXORPS, x86.REG_X15, x86.REG_X15)
}
@@ -997,22 +997,26 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
// Closure pointer is DX.
ssagen.CheckLoweredGetClosurePtr(v)
case ssa.OpAMD64LoweredGetG:
- if buildcfg.Experiment.RegabiG && s.ABI == obj.ABIInternal {
+ if s.ABI == obj.ABIInternal {
v.Fatalf("LoweredGetG should not appear in ABIInternal")
}
r := v.Reg()
getgFromTLS(s, r)
case ssa.OpAMD64CALLstatic:
- if buildcfg.Experiment.RegabiG && s.ABI == obj.ABI0 && v.Aux.(*ssa.AuxCall).Fn.ABI() == obj.ABIInternal {
+ if s.ABI == obj.ABI0 && v.Aux.(*ssa.AuxCall).Fn.ABI() == obj.ABIInternal {
// zeroing X15 when entering ABIInternal from ABI0
- opregreg(s, x86.AXORPS, x86.REG_X15, x86.REG_X15)
+ if buildcfg.GOOS != "plan9" { // do not use SSE on Plan 9
+ opregreg(s, x86.AXORPS, x86.REG_X15, x86.REG_X15)
+ }
// set G register from TLS
getgFromTLS(s, x86.REG_R14)
}
s.Call(v)
- if buildcfg.Experiment.RegabiG && s.ABI == obj.ABIInternal && v.Aux.(*ssa.AuxCall).Fn.ABI() == obj.ABI0 {
+ if s.ABI == obj.ABIInternal && v.Aux.(*ssa.AuxCall).Fn.ABI() == obj.ABI0 {
// zeroing X15 when entering ABIInternal from ABI0
- opregreg(s, x86.AXORPS, x86.REG_X15, x86.REG_X15)
+ if buildcfg.GOOS != "plan9" { // do not use SSE on Plan 9
+ opregreg(s, x86.AXORPS, x86.REG_X15, x86.REG_X15)
+ }
// set G register from TLS
getgFromTLS(s, x86.REG_R14)
}
@@ -1304,9 +1308,11 @@ func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
case ssa.BlockRet:
s.Prog(obj.ARET)
case ssa.BlockRetJmp:
- if buildcfg.Experiment.RegabiG && s.ABI == obj.ABI0 && b.Aux.(*obj.LSym).ABI() == obj.ABIInternal {
+ if s.ABI == obj.ABI0 && b.Aux.(*obj.LSym).ABI() == obj.ABIInternal {
// zeroing X15 when entering ABIInternal from ABI0
- opregreg(s, x86.AXORPS, x86.REG_X15, x86.REG_X15)
+ if buildcfg.GOOS != "plan9" { // do not use SSE on Plan 9
+ opregreg(s, x86.AXORPS, x86.REG_X15, x86.REG_X15)
+ }
// set G register from TLS
getgFromTLS(s, x86.REG_R14)
}
@@ -1348,20 +1354,15 @@ func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
}
}
-func loadRegResults(s *ssagen.State, f *ssa.Func) {
- for _, o := range f.OwnAux.ABIInfo().OutParams() {
- n := o.Name.(*ir.Name)
- rts, offs := o.RegisterTypesAndOffsets()
- for i := range o.Registers {
- p := s.Prog(loadByType(rts[i]))
- p.From.Type = obj.TYPE_MEM
- p.From.Name = obj.NAME_AUTO
- p.From.Sym = n.Linksym()
- p.From.Offset = n.FrameOffset() + offs[i]
- p.To.Type = obj.TYPE_REG
- p.To.Reg = ssa.ObjRegForAbiReg(o.Registers[i], f.Config)
- }
- }
+func loadRegResult(s *ssagen.State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
+ p := s.Prog(loadByType(t))
+ p.From.Type = obj.TYPE_MEM
+ p.From.Name = obj.NAME_AUTO
+ p.From.Sym = n.Linksym()
+ p.From.Offset = n.FrameOffset() + off
+ p.To.Type = obj.TYPE_REG
+ p.To.Reg = reg
+ return p
}
func spillArgReg(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
diff --git a/src/cmd/compile/internal/arm/galign.go b/src/cmd/compile/internal/arm/galign.go
index d68500280d..23e52bacbf 100644
--- a/src/cmd/compile/internal/arm/galign.go
+++ b/src/cmd/compile/internal/arm/galign.go
@@ -18,7 +18,6 @@ func Init(arch *ssagen.ArchInfo) {
arch.SoftFloat = buildcfg.GOARM == 5
arch.ZeroRange = zerorange
arch.Ginsnop = ginsnop
- arch.Ginsnopdefer = ginsnop
arch.SSAMarkMoves = func(s *ssagen.State, b *ssa.Block) {}
arch.SSAGenValue = ssaGenValue
diff --git a/src/cmd/compile/internal/arm64/galign.go b/src/cmd/compile/internal/arm64/galign.go
index d3db37e16f..3ebd860de8 100644
--- a/src/cmd/compile/internal/arm64/galign.go
+++ b/src/cmd/compile/internal/arm64/galign.go
@@ -18,9 +18,10 @@ func Init(arch *ssagen.ArchInfo) {
arch.PadFrame = padframe
arch.ZeroRange = zerorange
arch.Ginsnop = ginsnop
- arch.Ginsnopdefer = ginsnop
arch.SSAMarkMoves = func(s *ssagen.State, b *ssa.Block) {}
arch.SSAGenValue = ssaGenValue
arch.SSAGenBlock = ssaGenBlock
+ arch.LoadRegResult = loadRegResult
+ arch.SpillArgReg = spillArgReg
}
diff --git a/src/cmd/compile/internal/arm64/ssa.go b/src/cmd/compile/internal/arm64/ssa.go
index 0c997bc4b3..c3319f9491 100644
--- a/src/cmd/compile/internal/arm64/ssa.go
+++ b/src/cmd/compile/internal/arm64/ssa.go
@@ -10,6 +10,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
+ "cmd/compile/internal/objw"
"cmd/compile/internal/ssa"
"cmd/compile/internal/ssagen"
"cmd/compile/internal/types"
@@ -161,6 +162,18 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
p.From.Type = obj.TYPE_REG
p.From.Reg = v.Args[0].Reg()
ssagen.AddrAuto(&p.To, v)
+ case ssa.OpArgIntReg, ssa.OpArgFloatReg:
+ // The assembler needs to wrap the entry safepoint/stack growth code with spill/unspill
+ // The loop only runs once.
+ for _, a := range v.Block.Func.RegArgs {
+ // Pass the spill/unspill information along to the assembler, offset by size of
+ // the saved LR slot.
+ addr := ssagen.SpillSlotAddr(a, arm64.REGSP, base.Ctxt.FixedFrameSize())
+ s.FuncInfo().AddSpill(
+ obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type), Spill: storeByType(a.Type)})
+ }
+ v.Block.Func.RegArgs = nil
+ ssagen.CheckArgReg(v)
case ssa.OpARM64ADD,
ssa.OpARM64SUB,
ssa.OpARM64AND,
@@ -1101,8 +1114,34 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
v.Fatalf("FlagConstant op should never make it to codegen %v", v.LongString())
case ssa.OpARM64InvertFlags:
v.Fatalf("InvertFlags should never make it to codegen %v", v.LongString())
- case ssa.OpClobber, ssa.OpClobberReg:
- // TODO: implement for clobberdead experiment. Nop is ok for now.
+ case ssa.OpClobber:
+ // MOVW $0xdeaddead, REGTMP
+ // MOVW REGTMP, (slot)
+ // MOVW REGTMP, 4(slot)
+ p := s.Prog(arm64.AMOVW)
+ p.From.Type = obj.TYPE_CONST
+ p.From.Offset = 0xdeaddead
+ p.To.Type = obj.TYPE_REG
+ p.To.Reg = arm64.REGTMP
+ p = s.Prog(arm64.AMOVW)
+ p.From.Type = obj.TYPE_REG
+ p.From.Reg = arm64.REGTMP
+ p.To.Type = obj.TYPE_MEM
+ p.To.Reg = arm64.REGSP
+ ssagen.AddAux(&p.To, v)
+ p = s.Prog(arm64.AMOVW)
+ p.From.Type = obj.TYPE_REG
+ p.From.Reg = arm64.REGTMP
+ p.To.Type = obj.TYPE_MEM
+ p.To.Reg = arm64.REGSP
+ ssagen.AddAux2(&p.To, v, v.AuxInt+4)
+ case ssa.OpClobberReg:
+ x := uint64(0xdeaddeaddeaddead)
+ p := s.Prog(arm64.AMOVD)
+ p.From.Type = obj.TYPE_CONST
+ p.From.Offset = int64(x)
+ p.To.Type = obj.TYPE_REG
+ p.To.Reg = v.Reg()
default:
v.Fatalf("genValue not implemented: %s", v.LongString())
}
@@ -1266,3 +1305,22 @@ func ssaGenBlock(s *ssagen.State, b, next *ssa.Block) {
b.Fatalf("branch not implemented: %s", b.LongString())
}
}
+
+func loadRegResult(s *ssagen.State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
+ p := s.Prog(loadByType(t))
+ p.From.Type = obj.TYPE_MEM
+ p.From.Name = obj.NAME_AUTO
+ p.From.Sym = n.Linksym()
+ p.From.Offset = n.FrameOffset() + off
+ p.To.Type = obj.TYPE_REG
+ p.To.Reg = reg
+ return p
+}
+
+func spillArgReg(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog {
+ p = pp.Append(p, storeByType(t), obj.TYPE_REG, reg, 0, obj.TYPE_MEM, 0, n.FrameOffset()+off)
+ p.To.Name = obj.NAME_PARAM
+ p.To.Sym = n.Linksym()
+ p.Pos = p.Pos.WithNotStmt()
+ return p
+}
diff --git a/src/cmd/compile/internal/base/bootstrap_false.go b/src/cmd/compile/internal/base/bootstrap_false.go
new file mode 100644
index 0000000000..c77fcd7308
--- /dev/null
+++ b/src/cmd/compile/internal/base/bootstrap_false.go
@@ -0,0 +1,12 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !compiler_bootstrap
+// +build !compiler_bootstrap
+
+package base
+
+// CompilerBootstrap reports whether the current compiler binary was
+// built with -tags=compiler_bootstrap.
+const CompilerBootstrap = false
diff --git a/src/cmd/compile/internal/base/bootstrap_true.go b/src/cmd/compile/internal/base/bootstrap_true.go
new file mode 100644
index 0000000000..1eb58b2f9d
--- /dev/null
+++ b/src/cmd/compile/internal/base/bootstrap_true.go
@@ -0,0 +1,12 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build compiler_bootstrap
+// +build compiler_bootstrap
+
+package base
+
+// CompilerBootstrap reports whether the current compiler binary was
+// built with -tags=compiler_bootstrap.
+const CompilerBootstrap = true
diff --git a/src/cmd/compile/internal/base/debug.go b/src/cmd/compile/internal/base/debug.go
index 71712ab1a5..e2245e1c26 100644
--- a/src/cmd/compile/internal/base/debug.go
+++ b/src/cmd/compile/internal/base/debug.go
@@ -44,8 +44,11 @@ type DebugFlags struct {
Panic int `help:"show all compiler panics"`
Slice int `help:"print information about slice compilation"`
SoftFloat int `help:"force compiler to emit soft-float code"`
+ SyncFrames int `help:"how many writer stack frames to include at sync points in unified export data"`
TypeAssert int `help:"print information about type assertion inlining"`
TypecheckInl int `help:"eager typechecking of inline function bodies"`
+ Unified int `help:"enable unified IR construction"`
+ UnifiedQuirks int `help:"enable unified IR construction's quirks mode"`
WB int `help:"print information about write barriers"`
ABIWrap int `help:"print information about ABI wrapper generation"`
diff --git a/src/cmd/compile/internal/base/flag.go b/src/cmd/compile/internal/base/flag.go
index 42c0c1b94b..942659bcc0 100644
--- a/src/cmd/compile/internal/base/flag.go
+++ b/src/cmd/compile/internal/base/flag.go
@@ -140,6 +140,7 @@ type CmdFlags struct {
// ParseFlags parses the command-line flags into Flag.
func ParseFlags() {
+ Flag.G = 3
Flag.I = addImportDir
Flag.LowerC = 1
@@ -159,7 +160,11 @@ func ParseFlags() {
Flag.LinkShared = &Ctxt.Flag_linkshared
Flag.Shared = &Ctxt.Flag_shared
Flag.WB = true
+
Debug.InlFuncsWithClosures = 1
+ if buildcfg.Experiment.Unified {
+ Debug.Unified = 1
+ }
Debug.Checkptr = -1 // so we can tell whether it is set explicitly
diff --git a/src/cmd/compile/internal/typecheck/mapfile_mmap.go b/src/cmd/compile/internal/base/mapfile_mmap.go
index 298b385bcb..c1616db8e9 100644
--- a/src/cmd/compile/internal/typecheck/mapfile_mmap.go
+++ b/src/cmd/compile/internal/base/mapfile_mmap.go
@@ -5,7 +5,7 @@
//go:build darwin || dragonfly || freebsd || linux || netbsd || openbsd
// +build darwin dragonfly freebsd linux netbsd openbsd
-package typecheck
+package base
import (
"os"
@@ -19,7 +19,7 @@ import (
// mapFile returns length bytes from the file starting at the
// specified offset as a string.
-func mapFile(f *os.File, offset, length int64) (string, error) {
+func MapFile(f *os.File, offset, length int64) (string, error) {
// POSIX mmap: "The implementation may require that off is a
// multiple of the page size."
x := offset & int64(os.Getpagesize()-1)
diff --git a/src/cmd/compile/internal/typecheck/mapfile_read.go b/src/cmd/compile/internal/base/mapfile_read.go
index 9637ab97ab..01796a9bab 100644
--- a/src/cmd/compile/internal/typecheck/mapfile_read.go
+++ b/src/cmd/compile/internal/base/mapfile_read.go
@@ -5,14 +5,14 @@
//go:build !darwin && !dragonfly && !freebsd && !linux && !netbsd && !openbsd
// +build !darwin,!dragonfly,!freebsd,!linux,!netbsd,!openbsd
-package typecheck
+package base
import (
"io"
"os"
)
-func mapFile(f *os.File, offset, length int64) (string, error) {
+func MapFile(f *os.File, offset, length int64) (string, error) {
buf := make([]byte, length)
_, err := io.ReadFull(io.NewSectionReader(f, offset, length), buf)
if err != nil {
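Since mapFile is now exported as base.MapFile, other parts of the compiler can read a byte range of a file through one helper. A minimal usage sketch (the call site and variable names are hypothetical, not part of this change; the signature is the one shown above):

    // Hypothetical caller elsewhere in cmd/compile; f is an *os.File and
    // off/length describe the byte range to read.
    data, err := base.MapFile(f, off, length)
    if err != nil {
        base.Fatalf("reading export data: %v", err)
    }
    _ = data // data holds exactly length bytes of f starting at off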
diff --git a/src/cmd/compile/internal/base/print.go b/src/cmd/compile/internal/base/print.go
index b095fd704d..4afe2eb9ee 100644
--- a/src/cmd/compile/internal/base/print.go
+++ b/src/cmd/compile/internal/base/print.go
@@ -233,6 +233,27 @@ func FatalfAt(pos src.XPos, format string, args ...interface{}) {
ErrorExit()
}
+// Assert reports "assertion failed" with Fatalf, unless b is true.
+func Assert(b bool) {
+ if !b {
+ Fatalf("assertion failed")
+ }
+}
+
+// Assertf reports a fatal error with Fatalf, unless b is true.
+func Assertf(b bool, format string, args ...interface{}) {
+ if !b {
+ Fatalf(format, args...)
+ }
+}
+
+// AssertfAt reports a fatal error with FatalfAt, unless b is true.
+func AssertfAt(b bool, pos src.XPos, format string, args ...interface{}) {
+ if !b {
+ FatalfAt(pos, format, args...)
+ }
+}
+
// hcrash crashes the compiler when -h is set, to find out where a message is generated.
func hcrash() {
if Flag.LowerH != 0 {
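The new Assert helpers are thin wrappers over Fatalf/FatalfAt, letting callers replace explicit "if !cond { base.Fatalf(...) }" blocks. A usage sketch only (hypothetical call sites, not taken from this CL):

    // Hypothetical call sites inside cmd/compile.
    base.Assert(len(ks) == len(dsts))                            // reports "assertion failed"
    base.Assertf(n.Op() == ir.ONAME, "unexpected op %v", n.Op()) // custom Fatalf message
    base.AssertfAt(t != nil, n.Pos(), "missing type for %v", n)  // message anchored at n's position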
diff --git a/src/cmd/compile/internal/deadcode/deadcode.go b/src/cmd/compile/internal/deadcode/deadcode.go
index 520203787f..3658c89912 100644
--- a/src/cmd/compile/internal/deadcode/deadcode.go
+++ b/src/cmd/compile/internal/deadcode/deadcode.go
@@ -38,6 +38,7 @@ func Func(fn *ir.Func) {
}
}
+ ir.VisitList(fn.Body, markHiddenClosureDead)
fn.Body = []ir.Node{ir.NewBlockStmt(base.Pos, nil)}
}
@@ -62,9 +63,11 @@ func stmts(nn *ir.Nodes) {
if ir.IsConst(n.Cond, constant.Bool) {
var body ir.Nodes
if ir.BoolVal(n.Cond) {
+ ir.VisitList(n.Else, markHiddenClosureDead)
n.Else = ir.Nodes{}
body = n.Body
} else {
+ ir.VisitList(n.Body, markHiddenClosureDead)
n.Body = ir.Nodes{}
body = n.Else
}
@@ -150,3 +153,13 @@ func expr(n ir.Node) ir.Node {
}
return n
}
+
+func markHiddenClosureDead(n ir.Node) {
+ if n.Op() != ir.OCLOSURE {
+ return
+ }
+ clo := n.(*ir.ClosureExpr)
+ if clo.Func.IsHiddenClosure() {
+ clo.Func.SetIsDeadcodeClosure(true)
+ }
+}
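For a concrete picture of what markHiddenClosureDead covers: when stmts drops a branch guarded by a constant condition, any function literal inside that branch is a hidden closure that later phases can now recognize as dead. An illustrative source fragment (function names are made up):

    // Illustrative only: the else branch is dead after constant folding, so
    // the closure it contains is marked via SetIsDeadcodeClosure(true).
    if true {
        doWork()
    } else {
        go func() { neverRuns() }()
    }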
diff --git a/src/cmd/compile/internal/dwarfgen/dwarf.go b/src/cmd/compile/internal/dwarfgen/dwarf.go
index 0e22b61bc3..30472a9ebd 100644
--- a/src/cmd/compile/internal/dwarfgen/dwarf.go
+++ b/src/cmd/compile/internal/dwarfgen/dwarf.go
@@ -214,7 +214,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
Type: base.Ctxt.Lookup(typename),
DeclFile: declpos.RelFilename(),
DeclLine: declpos.RelLine(),
- DeclCol: declpos.Col(),
+ DeclCol: declpos.RelCol(),
InlIndex: int32(inlIndex),
ChildIndex: -1,
})
@@ -371,7 +371,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var {
Type: base.Ctxt.Lookup(typename),
DeclFile: declpos.RelFilename(),
DeclLine: declpos.RelLine(),
- DeclCol: declpos.Col(),
+ DeclCol: declpos.RelCol(),
InlIndex: int32(inlIndex),
ChildIndex: -1,
}
@@ -475,7 +475,7 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var
StackOffset: ssagen.StackOffset(debug.Slots[debug.VarSlots[varID][0]]),
DeclFile: declpos.RelFilename(),
DeclLine: declpos.RelLine(),
- DeclCol: declpos.Col(),
+ DeclCol: declpos.RelCol(),
InlIndex: int32(inlIndex),
ChildIndex: -1,
}
diff --git a/src/cmd/compile/internal/dwarfgen/dwinl.go b/src/cmd/compile/internal/dwarfgen/dwinl.go
index 8adb36fc88..c785e064a7 100644
--- a/src/cmd/compile/internal/dwarfgen/dwinl.go
+++ b/src/cmd/compile/internal/dwarfgen/dwinl.go
@@ -244,7 +244,7 @@ func makePreinlineDclMap(fnsym *obj.LSym) map[varPos]int {
DeclName: unversion(n.Sym().Name),
DeclFile: pos.RelFilename(),
DeclLine: pos.RelLine(),
- DeclCol: pos.Col(),
+ DeclCol: pos.RelCol(),
}
if _, found := m[vp]; found {
// We can see collisions (variables with the same name/file/line/col) in obfuscated or machine-generated code -- see issue 44378 for an example. Skip duplicates in such cases, since it is unlikely that a human will be debugging such code.
diff --git a/src/cmd/compile/internal/escape/assign.go b/src/cmd/compile/internal/escape/assign.go
new file mode 100644
index 0000000000..80697bf37b
--- /dev/null
+++ b/src/cmd/compile/internal/escape/assign.go
@@ -0,0 +1,120 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package escape
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+)
+
+// addr evaluates an addressable expression n and returns a hole
+// that represents storing into the represented location.
+func (e *escape) addr(n ir.Node) hole {
+ if n == nil || ir.IsBlank(n) {
+ // Can happen in select case, range, maybe others.
+ return e.discardHole()
+ }
+
+ k := e.heapHole()
+
+ switch n.Op() {
+ default:
+ base.Fatalf("unexpected addr: %v", n)
+ case ir.ONAME:
+ n := n.(*ir.Name)
+ if n.Class == ir.PEXTERN {
+ break
+ }
+ k = e.oldLoc(n).asHole()
+ case ir.OLINKSYMOFFSET:
+ break
+ case ir.ODOT:
+ n := n.(*ir.SelectorExpr)
+ k = e.addr(n.X)
+ case ir.OINDEX:
+ n := n.(*ir.IndexExpr)
+ e.discard(n.Index)
+ if n.X.Type().IsArray() {
+ k = e.addr(n.X)
+ } else {
+ e.discard(n.X)
+ }
+ case ir.ODEREF, ir.ODOTPTR:
+ e.discard(n)
+ case ir.OINDEXMAP:
+ n := n.(*ir.IndexExpr)
+ e.discard(n.X)
+ e.assignHeap(n.Index, "key of map put", n)
+ }
+
+ return k
+}
+
+func (e *escape) addrs(l ir.Nodes) []hole {
+ var ks []hole
+ for _, n := range l {
+ ks = append(ks, e.addr(n))
+ }
+ return ks
+}
+
+func (e *escape) assignHeap(src ir.Node, why string, where ir.Node) {
+ e.expr(e.heapHole().note(where, why), src)
+}
+
+// assignList evaluates the assignment dsts... = srcs....
+func (e *escape) assignList(dsts, srcs []ir.Node, why string, where ir.Node) {
+ ks := e.addrs(dsts)
+ for i, k := range ks {
+ var src ir.Node
+ if i < len(srcs) {
+ src = srcs[i]
+ }
+
+ if dst := dsts[i]; dst != nil {
+ // Detect implicit conversion of uintptr to unsafe.Pointer when
+ // storing into reflect.{Slice,String}Header.
+ if dst.Op() == ir.ODOTPTR && ir.IsReflectHeaderDataField(dst) {
+ e.unsafeValue(e.heapHole().note(where, why), src)
+ continue
+ }
+
+ // Filter out some no-op assignments for escape analysis.
+ if src != nil && isSelfAssign(dst, src) {
+ if base.Flag.LowerM != 0 {
+ base.WarnfAt(where.Pos(), "%v ignoring self-assignment in %v", e.curfn, where)
+ }
+ k = e.discardHole()
+ }
+ }
+
+ e.expr(k.note(where, why), src)
+ }
+
+ e.reassigned(ks, where)
+}
+
+// reassigned marks the locations associated with the given holes as
+// reassigned, unless the location represents a variable declared and
+// assigned exactly once by where.
+func (e *escape) reassigned(ks []hole, where ir.Node) {
+ if as, ok := where.(*ir.AssignStmt); ok && as.Op() == ir.OAS && as.Y == nil {
+ if dst, ok := as.X.(*ir.Name); ok && dst.Op() == ir.ONAME && dst.Defn == nil {
+ // Zero-value assignment for variable declared without an
+ // explicit initial value. Assume this is its initialization
+ // statement.
+ return
+ }
+ }
+
+ for _, k := range ks {
+ loc := k.dst
+ // Variables declared by range statements are assigned on every iteration.
+ if n, ok := loc.n.(*ir.Name); ok && n.Defn == where && where.Op() != ir.ORANGE {
+ continue
+ }
+ loc.reassigned = true
+ }
+}
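The self-assignment filter in assignList is easiest to see with a slice reassigned to a sub-slice of itself. A hedged illustration (the scratch type is hypothetical): isSelfAssign treats the statement as a no-op for escape analysis, so it gets a discard hole, and with -m the compiler notes the ignored self-assignment.

    // Illustrative only; "scratch" is not a real compiler type.
    type scratch struct{ buf []byte }

    func (s *scratch) reset() {
        // Recognized as a self-assignment: no escape edge is recorded for s.buf.
        s.buf = s.buf[:0]
    }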
diff --git a/src/cmd/compile/internal/escape/call.go b/src/cmd/compile/internal/escape/call.go
new file mode 100644
index 0000000000..9e5abed591
--- /dev/null
+++ b/src/cmd/compile/internal/escape/call.go
@@ -0,0 +1,428 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package escape
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
+ "cmd/compile/internal/types"
+ "cmd/internal/src"
+)
+
+// call evaluates a call expression, including builtin calls. ks
+// should contain the holes representing where the function callee's
+// results flow.
+func (e *escape) call(ks []hole, call ir.Node) {
+ var init ir.Nodes
+ e.callCommon(ks, call, &init, nil)
+ if len(init) != 0 {
+ call.(*ir.CallExpr).PtrInit().Append(init...)
+ }
+}
+
+func (e *escape) callCommon(ks []hole, call ir.Node, init *ir.Nodes, wrapper *ir.Func) {
+
+ // argumentFunc handles escape analysis of argument *argp to the
+ // given hole. If the function callee is statically known, fn is
+ // its *ir.Name; otherwise fn is nil.
+ argumentFunc := func(fn *ir.Name, k hole, argp *ir.Node) {
+ e.rewriteArgument(argp, init, call, fn, wrapper)
+
+ e.expr(k.note(call, "call parameter"), *argp)
+ }
+
+ argument := func(k hole, argp *ir.Node) {
+ argumentFunc(nil, k, argp)
+ }
+
+ switch call.Op() {
+ default:
+ ir.Dump("esc", call)
+ base.Fatalf("unexpected call op: %v", call.Op())
+
+ case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
+ call := call.(*ir.CallExpr)
+ typecheck.FixVariadicCall(call)
+ typecheck.FixMethodCall(call)
+
+ // Pick out the function callee, if statically known.
+ //
+ // TODO(mdempsky): Change fn from *ir.Name to *ir.Func, but some
+ // functions (e.g., runtime builtins, method wrappers, generated
+ // eq/hash functions) don't have it set. Investigate whether
+ // that's a concern.
+ var fn *ir.Name
+ switch call.Op() {
+ case ir.OCALLFUNC:
+ // If we have a direct call to a closure (not just one we were
+ // able to statically resolve with ir.StaticValue), mark it as
+ // such so batch.outlives can optimize the flow results.
+ if call.X.Op() == ir.OCLOSURE {
+ call.X.(*ir.ClosureExpr).Func.SetClosureCalled(true)
+ }
+
+ switch v := ir.StaticValue(call.X); v.Op() {
+ case ir.ONAME:
+ if v := v.(*ir.Name); v.Class == ir.PFUNC {
+ fn = v
+ }
+ case ir.OCLOSURE:
+ fn = v.(*ir.ClosureExpr).Func.Nname
+ case ir.OMETHEXPR:
+ fn = ir.MethodExprName(v)
+ }
+ case ir.OCALLMETH:
+ base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
+ }
+
+ fntype := call.X.Type()
+ if fn != nil {
+ fntype = fn.Type()
+ }
+
+ if ks != nil && fn != nil && e.inMutualBatch(fn) {
+ for i, result := range fn.Type().Results().FieldSlice() {
+ e.expr(ks[i], ir.AsNode(result.Nname))
+ }
+ }
+
+ var recvp *ir.Node
+ if call.Op() == ir.OCALLFUNC {
+ // Evaluate callee function expression.
+ //
+ // Note: We use argument and not argumentFunc, because while
+ // call.X here may be an argument to runtime.{new,defer}proc,
+ // it's not an argument to fn itself.
+ argument(e.discardHole(), &call.X)
+ } else {
+ recvp = &call.X.(*ir.SelectorExpr).X
+ }
+
+ args := call.Args
+ if recv := fntype.Recv(); recv != nil {
+ if recvp == nil {
+ // Function call using method expression. Receiver argument is
+ // at the front of the regular arguments list.
+ recvp = &args[0]
+ args = args[1:]
+ }
+
+ argumentFunc(fn, e.tagHole(ks, fn, recv), recvp)
+ }
+
+ for i, param := range fntype.Params().FieldSlice() {
+ argumentFunc(fn, e.tagHole(ks, fn, param), &args[i])
+ }
+
+ case ir.OINLCALL:
+ call := call.(*ir.InlinedCallExpr)
+ e.stmts(call.Body)
+ for i, result := range call.ReturnVars {
+ k := e.discardHole()
+ if ks != nil {
+ k = ks[i]
+ }
+ e.expr(k, result)
+ }
+
+ case ir.OAPPEND:
+ call := call.(*ir.CallExpr)
+ args := call.Args
+
+ // Appendee slice may flow directly to the result, if
+ // it has enough capacity. Alternatively, a new heap
+ // slice might be allocated, and all slice elements
+ // might flow to heap.
+ appendeeK := ks[0]
+ if args[0].Type().Elem().HasPointers() {
+ appendeeK = e.teeHole(appendeeK, e.heapHole().deref(call, "appendee slice"))
+ }
+ argument(appendeeK, &args[0])
+
+ if call.IsDDD {
+ appendedK := e.discardHole()
+ if args[1].Type().IsSlice() && args[1].Type().Elem().HasPointers() {
+ appendedK = e.heapHole().deref(call, "appended slice...")
+ }
+ argument(appendedK, &args[1])
+ } else {
+ for i := 1; i < len(args); i++ {
+ argument(e.heapHole(), &args[i])
+ }
+ }
+
+ case ir.OCOPY:
+ call := call.(*ir.BinaryExpr)
+ argument(e.discardHole(), &call.X)
+
+ copiedK := e.discardHole()
+ if call.Y.Type().IsSlice() && call.Y.Type().Elem().HasPointers() {
+ copiedK = e.heapHole().deref(call, "copied slice")
+ }
+ argument(copiedK, &call.Y)
+
+ case ir.OPANIC:
+ call := call.(*ir.UnaryExpr)
+ argument(e.heapHole(), &call.X)
+
+ case ir.OCOMPLEX:
+ call := call.(*ir.BinaryExpr)
+ argument(e.discardHole(), &call.X)
+ argument(e.discardHole(), &call.Y)
+
+ case ir.ODELETE, ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
+ call := call.(*ir.CallExpr)
+ fixRecoverCall(call)
+ for i := range call.Args {
+ argument(e.discardHole(), &call.Args[i])
+ }
+
+ case ir.OLEN, ir.OCAP, ir.OREAL, ir.OIMAG, ir.OCLOSE:
+ call := call.(*ir.UnaryExpr)
+ argument(e.discardHole(), &call.X)
+
+ case ir.OUNSAFEADD, ir.OUNSAFESLICE:
+ call := call.(*ir.BinaryExpr)
+ argument(ks[0], &call.X)
+ argument(e.discardHole(), &call.Y)
+ }
+}
+
+// goDeferStmt analyzes a "go" or "defer" statement.
+//
+// In the process, it also normalizes the statement to always use a
+// simple function call with no arguments and no results. For example,
+// it rewrites:
+//
+// defer f(x, y)
+//
+// into:
+//
+// x1, y1 := x, y
+// defer func() { f(x1, y1) }()
+func (e *escape) goDeferStmt(n *ir.GoDeferStmt) {
+ k := e.heapHole()
+ if n.Op() == ir.ODEFER && e.loopDepth == 1 {
+ // Top-level defer arguments don't escape to the heap,
+ // but they do need to last until they're invoked.
+ k = e.later(e.discardHole())
+
+ // force stack allocation of defer record, unless
+ // open-coded defers are used (see ssa.go)
+ n.SetEsc(ir.EscNever)
+ }
+
+ call := n.Call
+
+ init := n.PtrInit()
+ init.Append(ir.TakeInit(call)...)
+ e.stmts(*init)
+
+ // If the function is already a zero argument/result function call,
+ // just escape analyze it normally.
+ if call, ok := call.(*ir.CallExpr); ok && call.Op() == ir.OCALLFUNC {
+ if sig := call.X.Type(); sig.NumParams()+sig.NumResults() == 0 {
+ if clo, ok := call.X.(*ir.ClosureExpr); ok && n.Op() == ir.OGO {
+ clo.IsGoWrap = true
+ }
+ e.expr(k, call.X)
+ return
+ }
+ }
+
+ // Create a new no-argument function that we'll hand off to defer.
+ fn := ir.NewClosureFunc(n.Pos(), true)
+ fn.SetWrapper(true)
+ fn.Nname.SetType(types.NewSignature(types.LocalPkg, nil, nil, nil, nil))
+ fn.Body = []ir.Node{call}
+
+ clo := fn.OClosure
+ if n.Op() == ir.OGO {
+ clo.IsGoWrap = true
+ }
+
+ e.callCommon(nil, call, init, fn)
+ e.closures = append(e.closures, closure{e.spill(k, clo), clo})
+
+ // Create new top level call to closure.
+ n.Call = ir.NewCallExpr(call.Pos(), ir.OCALL, clo, nil)
+ ir.WithFunc(e.curfn, func() {
+ typecheck.Stmt(n.Call)
+ })
+}
+
+// rewriteArgument rewrites the argument *argp of the given call expression.
+// fn is the static callee function, if known.
+// wrapper is the go/defer wrapper function for call, if any.
+func (e *escape) rewriteArgument(argp *ir.Node, init *ir.Nodes, call ir.Node, fn *ir.Name, wrapper *ir.Func) {
+ var pragma ir.PragmaFlag
+ if fn != nil && fn.Func != nil {
+ pragma = fn.Func.Pragma
+ }
+
+ // unsafeUintptr rewrites "uintptr(ptr)" arguments to syscall-like
+ // functions, so that ptr is kept alive and/or escaped as
+ // appropriate. unsafeUintptr also reports whether it modified arg0.
+ unsafeUintptr := func(arg0 ir.Node) bool {
+ if pragma&(ir.UintptrKeepAlive|ir.UintptrEscapes) == 0 {
+ return false
+ }
+
+ // If the argument is really a pointer being converted to uintptr,
+ // arrange for the pointer to be kept alive until the call returns,
+ // by copying it into a temp and marking that temp
+ // still alive when we pop the temp stack.
+ if arg0.Op() != ir.OCONVNOP || !arg0.Type().IsUintptr() {
+ return false
+ }
+ arg := arg0.(*ir.ConvExpr)
+
+ if !arg.X.Type().IsUnsafePtr() {
+ return false
+ }
+
+ // Create and declare a new pointer-typed temp variable.
+ tmp := e.wrapExpr(arg.Pos(), &arg.X, init, call, wrapper)
+
+ if pragma&ir.UintptrEscapes != 0 {
+ e.flow(e.heapHole().note(arg, "//go:uintptrescapes"), e.oldLoc(tmp))
+ }
+
+ if pragma&ir.UintptrKeepAlive != 0 {
+ call := call.(*ir.CallExpr)
+
+ // SSA implements CallExpr.KeepAlive using OpVarLive, which
+ // doesn't support PAUTOHEAP variables. I tried changing it to
+ // use OpKeepAlive, but that ran into issues of its own.
+ // For now, the easy solution is to explicitly copy to (yet
+ // another) new temporary variable.
+ keep := tmp
+ if keep.Class == ir.PAUTOHEAP {
+ keep = e.copyExpr(arg.Pos(), tmp, call.PtrInit(), wrapper, false)
+ }
+
+ keep.SetAddrtaken(true) // ensure SSA keeps the tmp variable
+ call.KeepAlive = append(call.KeepAlive, keep)
+ }
+
+ return true
+ }
+
+ visit := func(pos src.XPos, argp *ir.Node) {
+ // Optimize a few common constant expressions. By leaving these
+ // untouched in the call expression, we let the wrapper handle
+ // evaluating them, rather than taking up closure context space.
+ switch arg := *argp; arg.Op() {
+ case ir.OLITERAL, ir.ONIL, ir.OMETHEXPR:
+ return
+ case ir.ONAME:
+ if arg.(*ir.Name).Class == ir.PFUNC {
+ return
+ }
+ }
+
+ if unsafeUintptr(*argp) {
+ return
+ }
+
+ if wrapper != nil {
+ e.wrapExpr(pos, argp, init, call, wrapper)
+ }
+ }
+
+ // Peel away any slice lits.
+ if arg := *argp; arg.Op() == ir.OSLICELIT {
+ list := arg.(*ir.CompLitExpr).List
+ for i := range list {
+ visit(arg.Pos(), &list[i])
+ }
+ } else {
+ visit(call.Pos(), argp)
+ }
+}
+
+// wrapExpr replaces *exprp with a temporary variable copy. If wrapper
+// is non-nil, the variable will be captured for use within that
+// function.
+func (e *escape) wrapExpr(pos src.XPos, exprp *ir.Node, init *ir.Nodes, call ir.Node, wrapper *ir.Func) *ir.Name {
+ tmp := e.copyExpr(pos, *exprp, init, e.curfn, true)
+
+ if wrapper != nil {
+ // Currently, for "defer i.M()", if i is nil it panics at the point
+ // of the defer statement, not when the deferred function is called.
+ // We need to do the nil check outside of the wrapper.
+ if call.Op() == ir.OCALLINTER && exprp == &call.(*ir.CallExpr).X.(*ir.SelectorExpr).X {
+ check := ir.NewUnaryExpr(pos, ir.OCHECKNIL, ir.NewUnaryExpr(pos, ir.OITAB, tmp))
+ init.Append(typecheck.Stmt(check))
+ }
+
+ e.oldLoc(tmp).captured = true
+
+ tmp = ir.NewClosureVar(pos, wrapper, tmp)
+ }
+
+ *exprp = tmp
+ return tmp
+}
+
+// copyExpr creates and returns a new temporary variable within fn;
+// appends statements to init to declare and initialize it to expr;
+// and escape analyzes the data flow if analyze is true.
+func (e *escape) copyExpr(pos src.XPos, expr ir.Node, init *ir.Nodes, fn *ir.Func, analyze bool) *ir.Name {
+ if ir.HasUniquePos(expr) {
+ pos = expr.Pos()
+ }
+
+ tmp := typecheck.TempAt(pos, fn, expr.Type())
+
+ stmts := []ir.Node{
+ ir.NewDecl(pos, ir.ODCL, tmp),
+ ir.NewAssignStmt(pos, tmp, expr),
+ }
+ typecheck.Stmts(stmts)
+ init.Append(stmts...)
+
+ if analyze {
+ e.newLoc(tmp, false)
+ e.stmts(stmts)
+ }
+
+ return tmp
+}
+
+// tagHole returns a hole for evaluating an argument passed to param.
+// ks should contain the holes representing where the function
+// callee's results flow. fn is the statically-known callee function,
+// if any.
+func (e *escape) tagHole(ks []hole, fn *ir.Name, param *types.Field) hole {
+ // If this is a dynamic call, we can't rely on param.Note.
+ if fn == nil {
+ return e.heapHole()
+ }
+
+ if e.inMutualBatch(fn) {
+ return e.addr(ir.AsNode(param.Nname))
+ }
+
+ // Call to previously tagged function.
+
+ var tagKs []hole
+
+ esc := parseLeaks(param.Note)
+ if x := esc.Heap(); x >= 0 {
+ tagKs = append(tagKs, e.heapHole().shift(x))
+ }
+
+ if ks != nil {
+ for i := 0; i < numEscResults; i++ {
+ if x := esc.Result(i); x >= 0 {
+ tagKs = append(tagKs, ks[i].shift(x))
+ }
+ }
+ }
+
+ return e.teeHole(tagKs...)
+}
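Mirroring the defer example in the goDeferStmt comment above, a go statement with arguments is normalized the same way: arguments are evaluated up front and the call is wrapped in a zero-argument closure whose IsGoWrap flag is set. Illustrative source-level view only (the compiler performs this on IR, not on source text):

    // Before:
    go f(x, y)

    // After (conceptually):
    x1, y1 := x, y
    go func() { f(x1, y1) }()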
diff --git a/src/cmd/compile/internal/escape/desugar.go b/src/cmd/compile/internal/escape/desugar.go
new file mode 100644
index 0000000000..8b3cc25cf9
--- /dev/null
+++ b/src/cmd/compile/internal/escape/desugar.go
@@ -0,0 +1,37 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package escape
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
+ "cmd/compile/internal/types"
+)
+
+// TODO(mdempsky): Desugaring doesn't belong during escape analysis,
+// but for now it's the most convenient place for some rewrites.
+
+// fixRecoverCall rewrites an ORECOVER call into ORECOVERFP,
+// adding an explicit frame pointer argument.
+// If call is not an ORECOVER call, it's left unmodified.
+func fixRecoverCall(call *ir.CallExpr) {
+ if call.Op() != ir.ORECOVER {
+ return
+ }
+
+ pos := call.Pos()
+
+ // FP is equal to caller's SP plus FixedFrameSize().
+ var fp ir.Node = ir.NewCallExpr(pos, ir.OGETCALLERSP, nil, nil)
+ if off := base.Ctxt.FixedFrameSize(); off != 0 {
+ fp = ir.NewBinaryExpr(fp.Pos(), ir.OADD, fp, ir.NewInt(off))
+ }
+ // TODO(mdempsky): Replace *int32 with unsafe.Pointer, without upsetting checkptr.
+ fp = ir.NewConvExpr(pos, ir.OCONVNOP, types.NewPtr(types.Types[types.TINT32]), fp)
+
+ call.SetOp(ir.ORECOVERFP)
+ call.Args = []ir.Node{typecheck.Expr(fp)}
+}
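A hedged sketch of what fixRecoverCall changes, at the source level (the real rewrite operates on IR nodes; recoverfp and getcallersp below are stand-ins for ORECOVERFP/OGETCALLERSP, not callable functions):

    // User code: this recover() is the call that gets rewritten.
    defer func() {
        if recover() != nil {
            // handle the panic
        }
    }()
    // Conceptual result: recover() becomes, roughly,
    //   recoverfp((*int32)(getcallersp() + FixedFrameSize()))
    // so the runtime receives the caller's frame pointer explicitly.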
diff --git a/src/cmd/compile/internal/escape/escape.go b/src/cmd/compile/internal/escape/escape.go
index cd56f07b61..61e0121a40 100644
--- a/src/cmd/compile/internal/escape/escape.go
+++ b/src/cmd/compile/internal/escape/escape.go
@@ -6,15 +6,11 @@ package escape
import (
"fmt"
- "math"
- "strings"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
- "cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
- "cmd/internal/src"
)
// Escape analysis.
@@ -118,90 +114,8 @@ type escape struct {
loopDepth int
}
-// An location represents an abstract location that stores a Go
-// variable.
-type location struct {
- n ir.Node // represented variable or expression, if any
- curfn *ir.Func // enclosing function
- edges []edge // incoming edges
- loopDepth int // loopDepth at declaration
-
- // resultIndex records the tuple index (starting at 1) for
- // PPARAMOUT variables within their function's result type.
- // For non-PPARAMOUT variables it's 0.
- resultIndex int
-
- // derefs and walkgen are used during walkOne to track the
- // minimal dereferences from the walk root.
- derefs int // >= -1
- walkgen uint32
-
- // dst and dstEdgeindex track the next immediate assignment
- // destination location during walkone, along with the index
- // of the edge pointing back to this location.
- dst *location
- dstEdgeIdx int
-
- // queued is used by walkAll to track whether this location is
- // in the walk queue.
- queued bool
-
- // escapes reports whether the represented variable's address
- // escapes; that is, whether the variable must be heap
- // allocated.
- escapes bool
-
- // transient reports whether the represented expression's
- // address does not outlive the statement; that is, whether
- // its storage can be immediately reused.
- transient bool
-
- // paramEsc records the represented parameter's leak set.
- paramEsc leaks
-
- captured bool // has a closure captured this variable?
- reassigned bool // has this variable been reassigned?
- addrtaken bool // has this variable's address been taken?
-}
-
-// An edge represents an assignment edge between two Go variables.
-type edge struct {
- src *location
- derefs int // >= -1
- notes *note
-}
-
-// Fmt is called from node printing to print information about escape analysis results.
-func Fmt(n ir.Node) string {
- text := ""
- switch n.Esc() {
- case ir.EscUnknown:
- break
-
- case ir.EscHeap:
- text = "esc(h)"
-
- case ir.EscNone:
- text = "esc(no)"
-
- case ir.EscNever:
- text = "esc(N)"
-
- default:
- text = fmt.Sprintf("esc(%d)", n.Esc())
- }
-
- if n.Op() == ir.ONAME {
- n := n.(*ir.Name)
- if loc, ok := n.Opt.(*location); ok && loc.loopDepth != 0 {
- if text != "" {
- text += " "
- }
- text += fmt.Sprintf("ld(%d)", loc.loopDepth)
- }
- }
-
- return text
+func Funcs(all []ir.Node) {
+ ir.VisitFuncsBottomUp(all, Batch)
}
// Batch performs escape analysis on a minimal batch of
@@ -269,8 +183,14 @@ func (b *batch) initFunc(fn *ir.Func) {
// Allocate locations for local variables.
for _, n := range fn.Dcl {
- if n.Op() == ir.ONAME {
- e.newLoc(n, false)
+ e.newLoc(n, false)
+ }
+
+ // Also for hidden parameters (e.g., the ".this" parameter to a
+ // method value wrapper).
+ if fn.OClosure == nil {
+ for _, n := range fn.ClosureVars {
+ e.newLoc(n.Canonical(), false)
}
}
@@ -342,1316 +262,6 @@ func (b *batch) flowClosure(k hole, clo *ir.ClosureExpr) {
}
}
-// Below we implement the methods for walking the AST and recording
-// data flow edges. Note that because a sub-expression might have
-// side-effects, it's important to always visit the entire AST.
-//
-// For example, write either:
-//
-// if x {
-// e.discard(n.Left)
-// } else {
-// e.value(k, n.Left)
-// }
-//
-// or
-//
-// if x {
-// k = e.discardHole()
-// }
-// e.value(k, n.Left)
-//
-// Do NOT write:
-//
-// // BAD: possibly loses side-effects within n.Left
-// if !x {
-// e.value(k, n.Left)
-// }
-
-// stmt evaluates a single Go statement.
-func (e *escape) stmt(n ir.Node) {
- if n == nil {
- return
- }
-
- lno := ir.SetPos(n)
- defer func() {
- base.Pos = lno
- }()
-
- if base.Flag.LowerM > 2 {
- fmt.Printf("%v:[%d] %v stmt: %v\n", base.FmtPos(base.Pos), e.loopDepth, e.curfn, n)
- }
-
- e.stmts(n.Init())
-
- switch n.Op() {
- default:
- base.Fatalf("unexpected stmt: %v", n)
-
- case ir.ODCLCONST, ir.ODCLTYPE, ir.OFALL, ir.OINLMARK:
- // nop
-
- case ir.OBREAK, ir.OCONTINUE, ir.OGOTO:
- // TODO(mdempsky): Handle dead code?
-
- case ir.OBLOCK:
- n := n.(*ir.BlockStmt)
- e.stmts(n.List)
-
- case ir.ODCL:
- // Record loop depth at declaration.
- n := n.(*ir.Decl)
- if !ir.IsBlank(n.X) {
- e.dcl(n.X)
- }
-
- case ir.OLABEL:
- n := n.(*ir.LabelStmt)
- switch e.labels[n.Label] {
- case nonlooping:
- if base.Flag.LowerM > 2 {
- fmt.Printf("%v:%v non-looping label\n", base.FmtPos(base.Pos), n)
- }
- case looping:
- if base.Flag.LowerM > 2 {
- fmt.Printf("%v: %v looping label\n", base.FmtPos(base.Pos), n)
- }
- e.loopDepth++
- default:
- base.Fatalf("label missing tag")
- }
- delete(e.labels, n.Label)
-
- case ir.OIF:
- n := n.(*ir.IfStmt)
- e.discard(n.Cond)
- e.block(n.Body)
- e.block(n.Else)
-
- case ir.OFOR, ir.OFORUNTIL:
- n := n.(*ir.ForStmt)
- e.loopDepth++
- e.discard(n.Cond)
- e.stmt(n.Post)
- e.block(n.Body)
- e.loopDepth--
-
- case ir.ORANGE:
- // for Key, Value = range X { Body }
- n := n.(*ir.RangeStmt)
-
- // X is evaluated outside the loop.
- tmp := e.newLoc(nil, false)
- e.expr(tmp.asHole(), n.X)
-
- e.loopDepth++
- ks := e.addrs([]ir.Node{n.Key, n.Value})
- if n.X.Type().IsArray() {
- e.flow(ks[1].note(n, "range"), tmp)
- } else {
- e.flow(ks[1].deref(n, "range-deref"), tmp)
- }
- e.reassigned(ks, n)
-
- e.block(n.Body)
- e.loopDepth--
-
- case ir.OSWITCH:
- n := n.(*ir.SwitchStmt)
-
- if guard, ok := n.Tag.(*ir.TypeSwitchGuard); ok {
- var ks []hole
- if guard.Tag != nil {
- for _, cas := range n.Cases {
- cv := cas.Var
- k := e.dcl(cv) // type switch variables have no ODCL.
- if cv.Type().HasPointers() {
- ks = append(ks, k.dotType(cv.Type(), cas, "switch case"))
- }
- }
- }
- e.expr(e.teeHole(ks...), n.Tag.(*ir.TypeSwitchGuard).X)
- } else {
- e.discard(n.Tag)
- }
-
- for _, cas := range n.Cases {
- e.discards(cas.List)
- e.block(cas.Body)
- }
-
- case ir.OSELECT:
- n := n.(*ir.SelectStmt)
- for _, cas := range n.Cases {
- e.stmt(cas.Comm)
- e.block(cas.Body)
- }
- case ir.ORECV:
- // TODO(mdempsky): Consider e.discard(n.Left).
- n := n.(*ir.UnaryExpr)
- e.exprSkipInit(e.discardHole(), n) // already visited n.Ninit
- case ir.OSEND:
- n := n.(*ir.SendStmt)
- e.discard(n.Chan)
- e.assignHeap(n.Value, "send", n)
-
- case ir.OAS:
- n := n.(*ir.AssignStmt)
- e.assignList([]ir.Node{n.X}, []ir.Node{n.Y}, "assign", n)
- case ir.OASOP:
- n := n.(*ir.AssignOpStmt)
- // TODO(mdempsky): Worry about OLSH/ORSH?
- e.assignList([]ir.Node{n.X}, []ir.Node{n.Y}, "assign", n)
- case ir.OAS2:
- n := n.(*ir.AssignListStmt)
- e.assignList(n.Lhs, n.Rhs, "assign-pair", n)
-
- case ir.OAS2DOTTYPE: // v, ok = x.(type)
- n := n.(*ir.AssignListStmt)
- e.assignList(n.Lhs, n.Rhs, "assign-pair-dot-type", n)
- case ir.OAS2MAPR: // v, ok = m[k]
- n := n.(*ir.AssignListStmt)
- e.assignList(n.Lhs, n.Rhs, "assign-pair-mapr", n)
- case ir.OAS2RECV, ir.OSELRECV2: // v, ok = <-ch
- n := n.(*ir.AssignListStmt)
- e.assignList(n.Lhs, n.Rhs, "assign-pair-receive", n)
-
- case ir.OAS2FUNC:
- n := n.(*ir.AssignListStmt)
- e.stmts(n.Rhs[0].Init())
- ks := e.addrs(n.Lhs)
- e.call(ks, n.Rhs[0], nil)
- e.reassigned(ks, n)
- case ir.ORETURN:
- n := n.(*ir.ReturnStmt)
- results := e.curfn.Type().Results().FieldSlice()
- dsts := make([]ir.Node, len(results))
- for i, res := range results {
- dsts[i] = res.Nname.(*ir.Name)
- }
- e.assignList(dsts, n.Results, "return", n)
- case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER, ir.OCLOSE, ir.OCOPY, ir.ODELETE, ir.OPANIC, ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
- e.call(nil, n, nil)
- case ir.OGO, ir.ODEFER:
- n := n.(*ir.GoDeferStmt)
- e.stmts(n.Call.Init())
- e.call(nil, n.Call, n)
-
- case ir.OTAILCALL:
- // TODO(mdempsky): Treat like a normal call? esc.go used to just ignore it.
- }
-}
-
-func (e *escape) stmts(l ir.Nodes) {
- for _, n := range l {
- e.stmt(n)
- }
-}
-
-// block is like stmts, but preserves loopDepth.
-func (e *escape) block(l ir.Nodes) {
- old := e.loopDepth
- e.stmts(l)
- e.loopDepth = old
-}
-
-// expr models evaluating an expression n and flowing the result into
-// hole k.
-func (e *escape) expr(k hole, n ir.Node) {
- if n == nil {
- return
- }
- e.stmts(n.Init())
- e.exprSkipInit(k, n)
-}
-
-func (e *escape) exprSkipInit(k hole, n ir.Node) {
- if n == nil {
- return
- }
-
- lno := ir.SetPos(n)
- defer func() {
- base.Pos = lno
- }()
-
- uintptrEscapesHack := k.uintptrEscapesHack
- k.uintptrEscapesHack = false
-
- if uintptrEscapesHack && n.Op() == ir.OCONVNOP && n.(*ir.ConvExpr).X.Type().IsUnsafePtr() {
- // nop
- } else if k.derefs >= 0 && !n.Type().HasPointers() {
- k.dst = &e.blankLoc
- }
-
- switch n.Op() {
- default:
- base.Fatalf("unexpected expr: %s %v", n.Op().String(), n)
-
- case ir.OLITERAL, ir.ONIL, ir.OGETG, ir.OTYPE, ir.OMETHEXPR, ir.OLINKSYMOFFSET:
- // nop
-
- case ir.ONAME:
- n := n.(*ir.Name)
- if n.Class == ir.PFUNC || n.Class == ir.PEXTERN {
- return
- }
- if n.IsClosureVar() && n.Defn == nil {
- return // ".this" from method value wrapper
- }
- e.flow(k, e.oldLoc(n))
-
- case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT:
- n := n.(*ir.UnaryExpr)
- e.discard(n.X)
- case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.ODIV, ir.OMOD, ir.OLSH, ir.ORSH, ir.OAND, ir.OANDNOT, ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
- n := n.(*ir.BinaryExpr)
- e.discard(n.X)
- e.discard(n.Y)
- case ir.OANDAND, ir.OOROR:
- n := n.(*ir.LogicalExpr)
- e.discard(n.X)
- e.discard(n.Y)
- case ir.OADDR:
- n := n.(*ir.AddrExpr)
- e.expr(k.addr(n, "address-of"), n.X) // "address-of"
- case ir.ODEREF:
- n := n.(*ir.StarExpr)
- e.expr(k.deref(n, "indirection"), n.X) // "indirection"
- case ir.ODOT, ir.ODOTMETH, ir.ODOTINTER:
- n := n.(*ir.SelectorExpr)
- e.expr(k.note(n, "dot"), n.X)
- case ir.ODOTPTR:
- n := n.(*ir.SelectorExpr)
- e.expr(k.deref(n, "dot of pointer"), n.X) // "dot of pointer"
- case ir.ODOTTYPE, ir.ODOTTYPE2:
- n := n.(*ir.TypeAssertExpr)
- e.expr(k.dotType(n.Type(), n, "dot"), n.X)
- case ir.OINDEX:
- n := n.(*ir.IndexExpr)
- if n.X.Type().IsArray() {
- e.expr(k.note(n, "fixed-array-index-of"), n.X)
- } else {
- // TODO(mdempsky): Fix why reason text.
- e.expr(k.deref(n, "dot of pointer"), n.X)
- }
- e.discard(n.Index)
- case ir.OINDEXMAP:
- n := n.(*ir.IndexExpr)
- e.discard(n.X)
- e.discard(n.Index)
- case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR:
- n := n.(*ir.SliceExpr)
- e.expr(k.note(n, "slice"), n.X)
- e.discard(n.Low)
- e.discard(n.High)
- e.discard(n.Max)
-
- case ir.OCONV, ir.OCONVNOP:
- n := n.(*ir.ConvExpr)
- if ir.ShouldCheckPtr(e.curfn, 2) && n.Type().IsUnsafePtr() && n.X.Type().IsPtr() {
- // When -d=checkptr=2 is enabled, treat
- // conversions to unsafe.Pointer as an
- // escaping operation. This allows better
- // runtime instrumentation, since we can more
- // easily detect object boundaries on the heap
- // than the stack.
- e.assignHeap(n.X, "conversion to unsafe.Pointer", n)
- } else if n.Type().IsUnsafePtr() && n.X.Type().IsUintptr() {
- e.unsafeValue(k, n.X)
- } else {
- e.expr(k, n.X)
- }
- case ir.OCONVIFACE:
- n := n.(*ir.ConvExpr)
- if !n.X.Type().IsInterface() && !types.IsDirectIface(n.X.Type()) {
- k = e.spill(k, n)
- }
- e.expr(k.note(n, "interface-converted"), n.X)
- case ir.OSLICE2ARRPTR:
- // the slice pointer flows directly to the result
- n := n.(*ir.ConvExpr)
- e.expr(k, n.X)
- case ir.ORECV:
- n := n.(*ir.UnaryExpr)
- e.discard(n.X)
-
- case ir.OCALLMETH, ir.OCALLFUNC, ir.OCALLINTER, ir.OLEN, ir.OCAP, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCOPY, ir.OUNSAFEADD, ir.OUNSAFESLICE:
- e.call([]hole{k}, n, nil)
-
- case ir.ONEW:
- n := n.(*ir.UnaryExpr)
- e.spill(k, n)
-
- case ir.OMAKESLICE:
- n := n.(*ir.MakeExpr)
- e.spill(k, n)
- e.discard(n.Len)
- e.discard(n.Cap)
- case ir.OMAKECHAN:
- n := n.(*ir.MakeExpr)
- e.discard(n.Len)
- case ir.OMAKEMAP:
- n := n.(*ir.MakeExpr)
- e.spill(k, n)
- e.discard(n.Len)
-
- case ir.ORECOVER:
- // nop
-
- case ir.OCALLPART:
- // Flow the receiver argument to both the closure and
- // to the receiver parameter.
-
- n := n.(*ir.SelectorExpr)
- closureK := e.spill(k, n)
-
- m := n.Selection
-
- // We don't know how the method value will be called
- // later, so conservatively assume the result
- // parameters all flow to the heap.
- //
- // TODO(mdempsky): Change ks into a callback, so that
- // we don't have to create this slice?
- var ks []hole
- for i := m.Type.NumResults(); i > 0; i-- {
- ks = append(ks, e.heapHole())
- }
- name, _ := m.Nname.(*ir.Name)
- paramK := e.tagHole(ks, name, m.Type.Recv())
-
- e.expr(e.teeHole(paramK, closureK), n.X)
-
- case ir.OPTRLIT:
- n := n.(*ir.AddrExpr)
- e.expr(e.spill(k, n), n.X)
-
- case ir.OARRAYLIT:
- n := n.(*ir.CompLitExpr)
- for _, elt := range n.List {
- if elt.Op() == ir.OKEY {
- elt = elt.(*ir.KeyExpr).Value
- }
- e.expr(k.note(n, "array literal element"), elt)
- }
-
- case ir.OSLICELIT:
- n := n.(*ir.CompLitExpr)
- k = e.spill(k, n)
- k.uintptrEscapesHack = uintptrEscapesHack // for ...uintptr parameters
-
- for _, elt := range n.List {
- if elt.Op() == ir.OKEY {
- elt = elt.(*ir.KeyExpr).Value
- }
- e.expr(k.note(n, "slice-literal-element"), elt)
- }
-
- case ir.OSTRUCTLIT:
- n := n.(*ir.CompLitExpr)
- for _, elt := range n.List {
- e.expr(k.note(n, "struct literal element"), elt.(*ir.StructKeyExpr).Value)
- }
-
- case ir.OMAPLIT:
- n := n.(*ir.CompLitExpr)
- e.spill(k, n)
-
- // Map keys and values are always stored in the heap.
- for _, elt := range n.List {
- elt := elt.(*ir.KeyExpr)
- e.assignHeap(elt.Key, "map literal key", n)
- e.assignHeap(elt.Value, "map literal value", n)
- }
-
- case ir.OCLOSURE:
- n := n.(*ir.ClosureExpr)
- k = e.spill(k, n)
- e.closures = append(e.closures, closure{k, n})
-
- if fn := n.Func; fn.IsHiddenClosure() {
- for _, cv := range fn.ClosureVars {
- if loc := e.oldLoc(cv); !loc.captured {
- loc.captured = true
-
- // Ignore reassignments to the variable in straightline code
- // preceding the first capture by a closure.
- if loc.loopDepth == e.loopDepth {
- loc.reassigned = false
- }
- }
- }
-
- for _, n := range fn.Dcl {
- // Add locations for local variables of the
- // closure, if needed, in case we're not including
- // the closure func in the batch for escape
- // analysis (happens for escape analysis called
- // from reflectdata.methodWrapper)
- if n.Op() == ir.ONAME && n.Opt == nil {
- e.with(fn).newLoc(n, false)
- }
- }
- e.walkFunc(fn)
- }
-
- case ir.ORUNES2STR, ir.OBYTES2STR, ir.OSTR2RUNES, ir.OSTR2BYTES, ir.ORUNESTR:
- n := n.(*ir.ConvExpr)
- e.spill(k, n)
- e.discard(n.X)
-
- case ir.OADDSTR:
- n := n.(*ir.AddStringExpr)
- e.spill(k, n)
-
- // Arguments of OADDSTR never escape;
- // runtime.concatstrings makes sure of that.
- e.discards(n.List)
- }
-}
-
-// unsafeValue evaluates a uintptr-typed arithmetic expression looking
-// for conversions from an unsafe.Pointer.
-func (e *escape) unsafeValue(k hole, n ir.Node) {
- if n.Type().Kind() != types.TUINTPTR {
- base.Fatalf("unexpected type %v for %v", n.Type(), n)
- }
- if k.addrtaken {
- base.Fatalf("unexpected addrtaken")
- }
-
- e.stmts(n.Init())
-
- switch n.Op() {
- case ir.OCONV, ir.OCONVNOP:
- n := n.(*ir.ConvExpr)
- if n.X.Type().IsUnsafePtr() {
- e.expr(k, n.X)
- } else {
- e.discard(n.X)
- }
- case ir.ODOTPTR:
- n := n.(*ir.SelectorExpr)
- if ir.IsReflectHeaderDataField(n) {
- e.expr(k.deref(n, "reflect.Header.Data"), n.X)
- } else {
- e.discard(n.X)
- }
- case ir.OPLUS, ir.ONEG, ir.OBITNOT:
- n := n.(*ir.UnaryExpr)
- e.unsafeValue(k, n.X)
- case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.ODIV, ir.OMOD, ir.OAND, ir.OANDNOT:
- n := n.(*ir.BinaryExpr)
- e.unsafeValue(k, n.X)
- e.unsafeValue(k, n.Y)
- case ir.OLSH, ir.ORSH:
- n := n.(*ir.BinaryExpr)
- e.unsafeValue(k, n.X)
- // RHS need not be uintptr-typed (#32959) and can't meaningfully
- // flow pointers anyway.
- e.discard(n.Y)
- default:
- e.exprSkipInit(e.discardHole(), n)
- }
-}
-
-// discard evaluates an expression n for side-effects, but discards
-// its value.
-func (e *escape) discard(n ir.Node) {
- e.expr(e.discardHole(), n)
-}
-
-func (e *escape) discards(l ir.Nodes) {
- for _, n := range l {
- e.discard(n)
- }
-}
-
-// addr evaluates an addressable expression n and returns a hole
-// that represents storing into the represented location.
-func (e *escape) addr(n ir.Node) hole {
- if n == nil || ir.IsBlank(n) {
- // Can happen in select case, range, maybe others.
- return e.discardHole()
- }
-
- k := e.heapHole()
-
- switch n.Op() {
- default:
- base.Fatalf("unexpected addr: %v", n)
- case ir.ONAME:
- n := n.(*ir.Name)
- if n.Class == ir.PEXTERN {
- break
- }
- k = e.oldLoc(n).asHole()
- case ir.OLINKSYMOFFSET:
- break
- case ir.ODOT:
- n := n.(*ir.SelectorExpr)
- k = e.addr(n.X)
- case ir.OINDEX:
- n := n.(*ir.IndexExpr)
- e.discard(n.Index)
- if n.X.Type().IsArray() {
- k = e.addr(n.X)
- } else {
- e.discard(n.X)
- }
- case ir.ODEREF, ir.ODOTPTR:
- e.discard(n)
- case ir.OINDEXMAP:
- n := n.(*ir.IndexExpr)
- e.discard(n.X)
- e.assignHeap(n.Index, "key of map put", n)
- }
-
- return k
-}
-
-func (e *escape) addrs(l ir.Nodes) []hole {
- var ks []hole
- for _, n := range l {
- ks = append(ks, e.addr(n))
- }
- return ks
-}
-
-// reassigned marks the locations associated with the given holes as
-// reassigned, unless the location represents a variable declared and
-// assigned exactly once by where.
-func (e *escape) reassigned(ks []hole, where ir.Node) {
- if as, ok := where.(*ir.AssignStmt); ok && as.Op() == ir.OAS && as.Y == nil {
- if dst, ok := as.X.(*ir.Name); ok && dst.Op() == ir.ONAME && dst.Defn == nil {
- // Zero-value assignment for variable declared without an
- // explicit initial value. Assume this is its initialization
- // statement.
- return
- }
- }
-
- for _, k := range ks {
- loc := k.dst
- // Variables declared by range statements are assigned on every iteration.
- if n, ok := loc.n.(*ir.Name); ok && n.Defn == where && where.Op() != ir.ORANGE {
- continue
- }
- loc.reassigned = true
- }
-}
-
-// assignList evaluates the assignment dsts... = srcs....
-func (e *escape) assignList(dsts, srcs []ir.Node, why string, where ir.Node) {
- ks := e.addrs(dsts)
- for i, k := range ks {
- var src ir.Node
- if i < len(srcs) {
- src = srcs[i]
- }
-
- if dst := dsts[i]; dst != nil {
- // Detect implicit conversion of uintptr to unsafe.Pointer when
- // storing into reflect.{Slice,String}Header.
- if dst.Op() == ir.ODOTPTR && ir.IsReflectHeaderDataField(dst) {
- e.unsafeValue(e.heapHole().note(where, why), src)
- continue
- }
-
- // Filter out some no-op assignments for escape analysis.
- if src != nil && isSelfAssign(dst, src) {
- if base.Flag.LowerM != 0 {
- base.WarnfAt(where.Pos(), "%v ignoring self-assignment in %v", e.curfn, where)
- }
- k = e.discardHole()
- }
- }
-
- e.expr(k.note(where, why), src)
- }
-
- e.reassigned(ks, where)
-}
-
-func (e *escape) assignHeap(src ir.Node, why string, where ir.Node) {
- e.expr(e.heapHole().note(where, why), src)
-}
-
-// call evaluates a call expressions, including builtin calls. ks
-// should contain the holes representing where the function callee's
-// results flows; where is the OGO/ODEFER context of the call, if any.
-func (e *escape) call(ks []hole, call, where ir.Node) {
- topLevelDefer := where != nil && where.Op() == ir.ODEFER && e.loopDepth == 1
- if topLevelDefer {
- // force stack allocation of defer record, unless
- // open-coded defers are used (see ssa.go)
- where.SetEsc(ir.EscNever)
- }
-
- argument := func(k hole, arg ir.Node) {
- if topLevelDefer {
- // Top level defers arguments don't escape to
- // heap, but they do need to last until end of
- // function.
- k = e.later(k)
- } else if where != nil {
- k = e.heapHole()
- }
-
- e.expr(k.note(call, "call parameter"), arg)
- }
-
- switch call.Op() {
- default:
- ir.Dump("esc", call)
- base.Fatalf("unexpected call op: %v", call.Op())
-
- case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
- call := call.(*ir.CallExpr)
- typecheck.FixVariadicCall(call)
-
- // Pick out the function callee, if statically known.
- var fn *ir.Name
- switch call.Op() {
- case ir.OCALLFUNC:
- switch v := ir.StaticValue(call.X); {
- case v.Op() == ir.ONAME && v.(*ir.Name).Class == ir.PFUNC:
- fn = v.(*ir.Name)
- case v.Op() == ir.OCLOSURE:
- fn = v.(*ir.ClosureExpr).Func.Nname
- }
- case ir.OCALLMETH:
- fn = ir.MethodExprName(call.X)
- }
-
- fntype := call.X.Type()
- if fn != nil {
- fntype = fn.Type()
- }
-
- if ks != nil && fn != nil && e.inMutualBatch(fn) {
- for i, result := range fn.Type().Results().FieldSlice() {
- e.expr(ks[i], ir.AsNode(result.Nname))
- }
- }
-
- if r := fntype.Recv(); r != nil {
- argument(e.tagHole(ks, fn, r), call.X.(*ir.SelectorExpr).X)
- } else {
- // Evaluate callee function expression.
- argument(e.discardHole(), call.X)
- }
-
- args := call.Args
- for i, param := range fntype.Params().FieldSlice() {
- argument(e.tagHole(ks, fn, param), args[i])
- }
-
- case ir.OAPPEND:
- call := call.(*ir.CallExpr)
- args := call.Args
-
- // Appendee slice may flow directly to the result, if
- // it has enough capacity. Alternatively, a new heap
- // slice might be allocated, and all slice elements
- // might flow to heap.
- appendeeK := ks[0]
- if args[0].Type().Elem().HasPointers() {
- appendeeK = e.teeHole(appendeeK, e.heapHole().deref(call, "appendee slice"))
- }
- argument(appendeeK, args[0])
-
- if call.IsDDD {
- appendedK := e.discardHole()
- if args[1].Type().IsSlice() && args[1].Type().Elem().HasPointers() {
- appendedK = e.heapHole().deref(call, "appended slice...")
- }
- argument(appendedK, args[1])
- } else {
- for _, arg := range args[1:] {
- argument(e.heapHole(), arg)
- }
- }
-
- case ir.OCOPY:
- call := call.(*ir.BinaryExpr)
- argument(e.discardHole(), call.X)
-
- copiedK := e.discardHole()
- if call.Y.Type().IsSlice() && call.Y.Type().Elem().HasPointers() {
- copiedK = e.heapHole().deref(call, "copied slice")
- }
- argument(copiedK, call.Y)
-
- case ir.OPANIC:
- call := call.(*ir.UnaryExpr)
- argument(e.heapHole(), call.X)
-
- case ir.OCOMPLEX:
- call := call.(*ir.BinaryExpr)
- argument(e.discardHole(), call.X)
- argument(e.discardHole(), call.Y)
- case ir.ODELETE, ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
- call := call.(*ir.CallExpr)
- for _, arg := range call.Args {
- argument(e.discardHole(), arg)
- }
- case ir.OLEN, ir.OCAP, ir.OREAL, ir.OIMAG, ir.OCLOSE:
- call := call.(*ir.UnaryExpr)
- argument(e.discardHole(), call.X)
-
- case ir.OUNSAFEADD, ir.OUNSAFESLICE:
- call := call.(*ir.BinaryExpr)
- argument(ks[0], call.X)
- argument(e.discardHole(), call.Y)
- }
-}
-
-// tagHole returns a hole for evaluating an argument passed to param.
-// ks should contain the holes representing where the function
-// callee's results flows. fn is the statically-known callee function,
-// if any.
-func (e *escape) tagHole(ks []hole, fn *ir.Name, param *types.Field) hole {
- // If this is a dynamic call, we can't rely on param.Note.
- if fn == nil {
- return e.heapHole()
- }
-
- if e.inMutualBatch(fn) {
- return e.addr(ir.AsNode(param.Nname))
- }
-
- // Call to previously tagged function.
-
- if param.Note == UintptrEscapesNote {
- k := e.heapHole()
- k.uintptrEscapesHack = true
- return k
- }
-
- var tagKs []hole
-
- esc := parseLeaks(param.Note)
- if x := esc.Heap(); x >= 0 {
- tagKs = append(tagKs, e.heapHole().shift(x))
- }
-
- if ks != nil {
- for i := 0; i < numEscResults; i++ {
- if x := esc.Result(i); x >= 0 {
- tagKs = append(tagKs, ks[i].shift(x))
- }
- }
- }
-
- return e.teeHole(tagKs...)
-}
-
-// inMutualBatch reports whether function fn is in the batch of
-// mutually recursive functions being analyzed. When this is true,
-// fn has not yet been analyzed, so its parameters and results
-// should be incorporated directly into the flow graph instead of
-// relying on its escape analysis tagging.
-func (e *escape) inMutualBatch(fn *ir.Name) bool {
- if fn.Defn != nil && fn.Defn.Esc() < escFuncTagged {
- if fn.Defn.Esc() == escFuncUnknown {
- base.Fatalf("graph inconsistency: %v", fn)
- }
- return true
- }
- return false
-}
-
-// An hole represents a context for evaluation a Go
-// expression. E.g., when evaluating p in "x = **p", we'd have a hole
-// with dst==x and derefs==2.
-type hole struct {
- dst *location
- derefs int // >= -1
- notes *note
-
- // addrtaken indicates whether this context is taking the address of
- // the expression, independent of whether the address will actually
- // be stored into a variable.
- addrtaken bool
-
- // uintptrEscapesHack indicates this context is evaluating an
- // argument for a //go:uintptrescapes function.
- uintptrEscapesHack bool
-}
-
-type note struct {
- next *note
- where ir.Node
- why string
-}
-
-func (k hole) note(where ir.Node, why string) hole {
- if where == nil || why == "" {
- base.Fatalf("note: missing where/why")
- }
- if base.Flag.LowerM >= 2 || logopt.Enabled() {
- k.notes = &note{
- next: k.notes,
- where: where,
- why: why,
- }
- }
- return k
-}
-
-func (k hole) shift(delta int) hole {
- k.derefs += delta
- if k.derefs < -1 {
- base.Fatalf("derefs underflow: %v", k.derefs)
- }
- k.addrtaken = delta < 0
- return k
-}
-
-func (k hole) deref(where ir.Node, why string) hole { return k.shift(1).note(where, why) }
-func (k hole) addr(where ir.Node, why string) hole { return k.shift(-1).note(where, why) }
-
-func (k hole) dotType(t *types.Type, where ir.Node, why string) hole {
- if !t.IsInterface() && !types.IsDirectIface(t) {
- k = k.shift(1)
- }
- return k.note(where, why)
-}
-
-// teeHole returns a new hole that flows into each hole of ks,
-// similar to the Unix tee(1) command.
-func (e *escape) teeHole(ks ...hole) hole {
- if len(ks) == 0 {
- return e.discardHole()
- }
- if len(ks) == 1 {
- return ks[0]
- }
- // TODO(mdempsky): Optimize if there's only one non-discard hole?
-
- // Given holes "l1 = _", "l2 = **_", "l3 = *_", ..., create a
- // new temporary location ltmp, wire it into place, and return
- // a hole for "ltmp = _".
- loc := e.newLoc(nil, true)
- for _, k := range ks {
- // N.B., "p = &q" and "p = &tmp; tmp = q" are not
- // semantically equivalent. To combine holes like "l1
- // = _" and "l2 = &_", we'd need to wire them as "l1 =
- // *ltmp" and "l2 = ltmp" and return "ltmp = &_"
- // instead.
- if k.derefs < 0 {
- base.Fatalf("teeHole: negative derefs")
- }
-
- e.flow(k, loc)
- }
- return loc.asHole()
-}
-
-func (e *escape) dcl(n *ir.Name) hole {
- if n.Curfn != e.curfn || n.IsClosureVar() {
- base.Fatalf("bad declaration of %v", n)
- }
- loc := e.oldLoc(n)
- loc.loopDepth = e.loopDepth
- return loc.asHole()
-}
-
-// spill allocates a new location associated with expression n, flows
-// its address to k, and returns a hole that flows values to it. It's
-// intended for use with most expressions that allocate storage.
-func (e *escape) spill(k hole, n ir.Node) hole {
- loc := e.newLoc(n, true)
- e.flow(k.addr(n, "spill"), loc)
- return loc.asHole()
-}
-
-// later returns a new hole that flows into k, but some time later.
-// Its main effect is to prevent immediate reuse of temporary
-// variables introduced during Order.
-func (e *escape) later(k hole) hole {
- loc := e.newLoc(nil, false)
- e.flow(k, loc)
- return loc.asHole()
-}
-
-func (e *escape) newLoc(n ir.Node, transient bool) *location {
- if e.curfn == nil {
- base.Fatalf("e.curfn isn't set")
- }
- if n != nil && n.Type() != nil && n.Type().NotInHeap() {
- base.ErrorfAt(n.Pos(), "%v is incomplete (or unallocatable); stack allocation disallowed", n.Type())
- }
-
- if n != nil && n.Op() == ir.ONAME {
- n = n.(*ir.Name).Canonical()
- }
- loc := &location{
- n: n,
- curfn: e.curfn,
- loopDepth: e.loopDepth,
- transient: transient,
- }
- e.allLocs = append(e.allLocs, loc)
- if n != nil {
- if n.Op() == ir.ONAME {
- n := n.(*ir.Name)
- if n.Curfn != e.curfn {
- base.Fatalf("curfn mismatch: %v != %v for %v", n.Curfn, e.curfn, n)
- }
-
- if n.Opt != nil {
- base.Fatalf("%v already has a location", n)
- }
- n.Opt = loc
- }
- }
- return loc
-}
-
-func (b *batch) oldLoc(n *ir.Name) *location {
- if n.Canonical().Opt == nil {
- base.Fatalf("%v has no location", n)
- }
- return n.Canonical().Opt.(*location)
-}
-
-func (l *location) asHole() hole {
- return hole{dst: l}
-}
-
-func (b *batch) flow(k hole, src *location) {
- if k.addrtaken {
- src.addrtaken = true
- }
-
- dst := k.dst
- if dst == &b.blankLoc {
- return
- }
- if dst == src && k.derefs >= 0 { // dst = dst, dst = *dst, ...
- return
- }
- if dst.escapes && k.derefs < 0 { // dst = &src
- if base.Flag.LowerM >= 2 || logopt.Enabled() {
- pos := base.FmtPos(src.n.Pos())
- if base.Flag.LowerM >= 2 {
- fmt.Printf("%s: %v escapes to heap:\n", pos, src.n)
- }
- explanation := b.explainFlow(pos, dst, src, k.derefs, k.notes, []*logopt.LoggedOpt{})
- if logopt.Enabled() {
- var e_curfn *ir.Func // TODO(mdempsky): Fix.
- logopt.LogOpt(src.n.Pos(), "escapes", "escape", ir.FuncName(e_curfn), fmt.Sprintf("%v escapes to heap", src.n), explanation)
- }
-
- }
- src.escapes = true
- return
- }
-
- // TODO(mdempsky): Deduplicate edges?
- dst.edges = append(dst.edges, edge{src: src, derefs: k.derefs, notes: k.notes})
-}
-
-func (b *batch) heapHole() hole { return b.heapLoc.asHole() }
-func (b *batch) discardHole() hole { return b.blankLoc.asHole() }
-
-// walkAll computes the minimal dereferences between all pairs of
-// locations.
-func (b *batch) walkAll() {
- // We use a work queue to keep track of locations that we need
- // to visit, and repeatedly walk until we reach a fixed point.
- //
- // We walk once from each location (including the heap), and
- // then re-enqueue each location on its transition from
- // transient->!transient and !escapes->escapes, which can each
- // happen at most once. So we take Θ(len(e.allLocs)) walks.
-
- // LIFO queue, has enough room for e.allLocs and e.heapLoc.
- todo := make([]*location, 0, len(b.allLocs)+1)
- enqueue := func(loc *location) {
- if !loc.queued {
- todo = append(todo, loc)
- loc.queued = true
- }
- }
-
- for _, loc := range b.allLocs {
- enqueue(loc)
- }
- enqueue(&b.heapLoc)
-
- var walkgen uint32
- for len(todo) > 0 {
- root := todo[len(todo)-1]
- todo = todo[:len(todo)-1]
- root.queued = false
-
- walkgen++
- b.walkOne(root, walkgen, enqueue)
- }
-}
-
-// walkOne computes the minimal number of dereferences from root to
-// all other locations.
-func (b *batch) walkOne(root *location, walkgen uint32, enqueue func(*location)) {
- // The data flow graph has negative edges (from addressing
- // operations), so we use the Bellman-Ford algorithm. However,
- // we don't have to worry about infinite negative cycles since
- // we bound intermediate dereference counts to 0.
-
- root.walkgen = walkgen
- root.derefs = 0
- root.dst = nil
-
- todo := []*location{root} // LIFO queue
- for len(todo) > 0 {
- l := todo[len(todo)-1]
- todo = todo[:len(todo)-1]
-
- derefs := l.derefs
-
- // If l.derefs < 0, then l's address flows to root.
- addressOf := derefs < 0
- if addressOf {
- // For a flow path like "root = &l; l = x",
- // l's address flows to root, but x's does
- // not. We recognize this by lower bounding
- // derefs at 0.
- derefs = 0
-
- // If l's address flows to a non-transient
- // location, then l can't be transiently
- // allocated.
- if !root.transient && l.transient {
- l.transient = false
- enqueue(l)
- }
- }
-
- if b.outlives(root, l) {
- // l's value flows to root. If l is a function
- // parameter and root is the heap or a
- // corresponding result parameter, then record
- // that value flow for tagging the function
- // later.
- if l.isName(ir.PPARAM) {
- if (logopt.Enabled() || base.Flag.LowerM >= 2) && !l.escapes {
- if base.Flag.LowerM >= 2 {
- fmt.Printf("%s: parameter %v leaks to %s with derefs=%d:\n", base.FmtPos(l.n.Pos()), l.n, b.explainLoc(root), derefs)
- }
- explanation := b.explainPath(root, l)
- if logopt.Enabled() {
- var e_curfn *ir.Func // TODO(mdempsky): Fix.
- logopt.LogOpt(l.n.Pos(), "leak", "escape", ir.FuncName(e_curfn),
- fmt.Sprintf("parameter %v leaks to %s with derefs=%d", l.n, b.explainLoc(root), derefs), explanation)
- }
- }
- l.leakTo(root, derefs)
- }
-
- // If l's address flows somewhere that
- // outlives it, then l needs to be heap
- // allocated.
- if addressOf && !l.escapes {
- if logopt.Enabled() || base.Flag.LowerM >= 2 {
- if base.Flag.LowerM >= 2 {
- fmt.Printf("%s: %v escapes to heap:\n", base.FmtPos(l.n.Pos()), l.n)
- }
- explanation := b.explainPath(root, l)
- if logopt.Enabled() {
- var e_curfn *ir.Func // TODO(mdempsky): Fix.
- logopt.LogOpt(l.n.Pos(), "escape", "escape", ir.FuncName(e_curfn), fmt.Sprintf("%v escapes to heap", l.n), explanation)
- }
- }
- l.escapes = true
- enqueue(l)
- continue
- }
- }
-
- for i, edge := range l.edges {
- if edge.src.escapes {
- continue
- }
- d := derefs + edge.derefs
- if edge.src.walkgen != walkgen || edge.src.derefs > d {
- edge.src.walkgen = walkgen
- edge.src.derefs = d
- edge.src.dst = l
- edge.src.dstEdgeIdx = i
- todo = append(todo, edge.src)
- }
- }
- }
-}
-
-// explainPath prints an explanation of how src flows to the walk root.
-func (b *batch) explainPath(root, src *location) []*logopt.LoggedOpt {
- visited := make(map[*location]bool)
- pos := base.FmtPos(src.n.Pos())
- var explanation []*logopt.LoggedOpt
- for {
- // Prevent infinite loop.
- if visited[src] {
- if base.Flag.LowerM >= 2 {
- fmt.Printf("%s: warning: truncated explanation due to assignment cycle; see golang.org/issue/35518\n", pos)
- }
- break
- }
- visited[src] = true
- dst := src.dst
- edge := &dst.edges[src.dstEdgeIdx]
- if edge.src != src {
- base.Fatalf("path inconsistency: %v != %v", edge.src, src)
- }
-
- explanation = b.explainFlow(pos, dst, src, edge.derefs, edge.notes, explanation)
-
- if dst == root {
- break
- }
- src = dst
- }
-
- return explanation
-}
-
-func (b *batch) explainFlow(pos string, dst, srcloc *location, derefs int, notes *note, explanation []*logopt.LoggedOpt) []*logopt.LoggedOpt {
- ops := "&"
- if derefs >= 0 {
- ops = strings.Repeat("*", derefs)
- }
- print := base.Flag.LowerM >= 2
-
- flow := fmt.Sprintf(" flow: %s = %s%v:", b.explainLoc(dst), ops, b.explainLoc(srcloc))
- if print {
- fmt.Printf("%s:%s\n", pos, flow)
- }
- if logopt.Enabled() {
- var epos src.XPos
- if notes != nil {
- epos = notes.where.Pos()
- } else if srcloc != nil && srcloc.n != nil {
- epos = srcloc.n.Pos()
- }
- var e_curfn *ir.Func // TODO(mdempsky): Fix.
- explanation = append(explanation, logopt.NewLoggedOpt(epos, "escflow", "escape", ir.FuncName(e_curfn), flow))
- }
-
- for note := notes; note != nil; note = note.next {
- if print {
- fmt.Printf("%s: from %v (%v) at %s\n", pos, note.where, note.why, base.FmtPos(note.where.Pos()))
- }
- if logopt.Enabled() {
- var e_curfn *ir.Func // TODO(mdempsky): Fix.
- explanation = append(explanation, logopt.NewLoggedOpt(note.where.Pos(), "escflow", "escape", ir.FuncName(e_curfn),
- fmt.Sprintf(" from %v (%v)", note.where, note.why)))
- }
- }
- return explanation
-}
-
-func (b *batch) explainLoc(l *location) string {
- if l == &b.heapLoc {
- return "{heap}"
- }
- if l.n == nil {
- // TODO(mdempsky): Omit entirely.
- return "{temp}"
- }
- if l.n.Op() == ir.ONAME {
- return fmt.Sprintf("%v", l.n)
- }
- return fmt.Sprintf("{storage for %v}", l.n)
-}
-
-// outlives reports whether values stored in l may survive beyond
-// other's lifetime if stack allocated.
-func (b *batch) outlives(l, other *location) bool {
- // The heap outlives everything.
- if l.escapes {
- return true
- }
-
- // We don't know what callers do with returned values, so
- // pessimistically we need to assume they flow to the heap and
- // outlive everything too.
- if l.isName(ir.PPARAMOUT) {
- // Exception: Directly called closures can return
- // locations allocated outside of them without forcing
- // them to the heap. For example:
- //
- // var u int // okay to stack allocate
- // *(func() *int { return &u }()) = 42
- if containsClosure(other.curfn, l.curfn) && l.curfn.ClosureCalled() {
- return false
- }
-
- return true
- }
-
- // If l and other are within the same function, then l
- // outlives other if it was declared outside other's loop
- // scope. For example:
- //
- // var l *int
- // for {
- // l = new(int)
- // }
- if l.curfn == other.curfn && l.loopDepth < other.loopDepth {
- return true
- }
-
- // If other is declared within a child closure of where l is
- // declared, then l outlives it. For example:
- //
- // var l *int
- // func() {
- // l = new(int)
- // }
- if containsClosure(l.curfn, other.curfn) {
- return true
- }
-
- return false
-}
-
-// containsClosure reports whether c is a closure contained within f.
-func containsClosure(f, c *ir.Func) bool {
- // Common case.
- if f == c {
- return false
- }
-
- // Closures within function Foo are named like "Foo.funcN..."
- // TODO(mdempsky): Better way to recognize this.
- fn := f.Sym().Name
- cn := c.Sym().Name
- return len(cn) > len(fn) && cn[:len(fn)] == fn && cn[len(fn)] == '.'
-}
-
-// leak records that parameter l leaks to sink.
-func (l *location) leakTo(sink *location, derefs int) {
- // If sink is a result parameter that doesn't escape (#44614)
- // and we can fit return bits into the escape analysis tag,
- // then record as a result leak.
- if !sink.escapes && sink.isName(ir.PPARAMOUT) && sink.curfn == l.curfn {
- ri := sink.resultIndex - 1
- if ri < numEscResults {
- // Leak to result parameter.
- l.paramEsc.AddResult(ri, derefs)
- return
- }
- }
-
- // Otherwise, record as heap leak.
- l.paramEsc.AddHeap(derefs)
-}
-
func (b *batch) finish(fns []*ir.Func) {
// Record parameter tags for package export data.
for _, fn := range fns {
@@ -1678,6 +288,11 @@ func (b *batch) finish(fns []*ir.Func) {
// Update n.Esc based on escape analysis results.
+ // Omit escape diagnostics for go/defer wrappers, at least for now.
+ // Historically, we haven't printed them, and test cases don't expect them.
+ // TODO(mdempsky): Update tests to expect this.
+ goDeferWrapper := n.Op() == ir.OCLOSURE && n.(*ir.ClosureExpr).Func.Wrapper()
+
if loc.escapes {
if n.Op() == ir.ONAME {
if base.Flag.CompilingRuntime {
@@ -1687,7 +302,7 @@ func (b *batch) finish(fns []*ir.Func) {
base.WarnfAt(n.Pos(), "moved to heap: %v", n)
}
} else {
- if base.Flag.LowerM != 0 {
+ if base.Flag.LowerM != 0 && !goDeferWrapper {
base.WarnfAt(n.Pos(), "%v escapes to heap", n)
}
if logopt.Enabled() {
@@ -1697,7 +312,7 @@ func (b *batch) finish(fns []*ir.Func) {
}
n.SetEsc(ir.EscHeap)
} else {
- if base.Flag.LowerM != 0 && n.Op() != ir.ONAME {
+ if base.Flag.LowerM != 0 && n.Op() != ir.ONAME && !goDeferWrapper {
base.WarnfAt(n.Pos(), "%v does not escape", n)
}
n.SetEsc(ir.EscNone)
@@ -1706,7 +321,7 @@ func (b *batch) finish(fns []*ir.Func) {
case ir.OCLOSURE:
n := n.(*ir.ClosureExpr)
n.SetTransient(true)
- case ir.OCALLPART:
+ case ir.OMETHVALUE:
n := n.(*ir.SelectorExpr)
n.SetTransient(true)
case ir.OSLICELIT:
@@ -1718,107 +333,19 @@ func (b *batch) finish(fns []*ir.Func) {
}
}
-func (l *location) isName(c ir.Class) bool {
- return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class == c
-}
-
-const numEscResults = 7
-
-// An leaks represents a set of assignment flows from a parameter
-// to the heap or to any of its function's (first numEscResults)
-// result parameters.
-type leaks [1 + numEscResults]uint8
-
-// Empty reports whether l is an empty set (i.e., no assignment flows).
-func (l leaks) Empty() bool { return l == leaks{} }
-
-// Heap returns the minimum deref count of any assignment flow from l
-// to the heap. If no such flows exist, Heap returns -1.
-func (l leaks) Heap() int { return l.get(0) }
-
-// Result returns the minimum deref count of any assignment flow from
-// l to its function's i'th result parameter. If no such flows exist,
-// Result returns -1.
-func (l leaks) Result(i int) int { return l.get(1 + i) }
-
-// AddHeap adds an assignment flow from l to the heap.
-func (l *leaks) AddHeap(derefs int) { l.add(0, derefs) }
-
-// AddResult adds an assignment flow from l to its function's i'th
-// result parameter.
-func (l *leaks) AddResult(i, derefs int) { l.add(1+i, derefs) }
-
-func (l *leaks) setResult(i, derefs int) { l.set(1+i, derefs) }
-
-func (l leaks) get(i int) int { return int(l[i]) - 1 }
-
-func (l *leaks) add(i, derefs int) {
- if old := l.get(i); old < 0 || derefs < old {
- l.set(i, derefs)
- }
-}
-
-func (l *leaks) set(i, derefs int) {
- v := derefs + 1
- if v < 0 {
- base.Fatalf("invalid derefs count: %v", derefs)
- }
- if v > math.MaxUint8 {
- v = math.MaxUint8
- }
-
- l[i] = uint8(v)
-}
-
-// Optimize removes result flow paths that are equal in length or
-// longer than the shortest heap flow path.
-func (l *leaks) Optimize() {
- // If we have a path to the heap, then there's no use in
- // keeping equal or longer paths elsewhere.
- if x := l.Heap(); x >= 0 {
- for i := 0; i < numEscResults; i++ {
- if l.Result(i) >= x {
- l.setResult(i, -1)
- }
+// inMutualBatch reports whether function fn is in the batch of
+// mutually recursive functions being analyzed. When this is true,
+// fn has not yet been analyzed, so its parameters and results
+// should be incorporated directly into the flow graph instead of
+// relying on its escape analysis tagging.
+func (e *escape) inMutualBatch(fn *ir.Name) bool {
+ if fn.Defn != nil && fn.Defn.Esc() < escFuncTagged {
+ if fn.Defn.Esc() == escFuncUnknown {
+ base.Fatalf("graph inconsistency: %v", fn)
}
+ return true
}
-}
-
-var leakTagCache = map[leaks]string{}
-
-// Encode converts l into a binary string for export data.
-func (l leaks) Encode() string {
- if l.Heap() == 0 {
- // Space optimization: empty string encodes more
- // efficiently in export data.
- return ""
- }
- if s, ok := leakTagCache[l]; ok {
- return s
- }
-
- n := len(l)
- for n > 0 && l[n-1] == 0 {
- n--
- }
- s := "esc:" + string(l[:n])
- leakTagCache[l] = s
- return s
-}
-
-// parseLeaks parses a binary string representing a leaks
-func parseLeaks(s string) leaks {
- var l leaks
- if !strings.HasPrefix(s, "esc:") {
- l.AddHeap(0)
- return l
- }
- copy(l[:], s[4:])
- return l
-}
-
-func Funcs(all []ir.Node) {
- ir.VisitFuncsBottomUp(all, Batch)
+ return false
}
const (
@@ -1836,220 +363,6 @@ const (
nonlooping
)
-func isSliceSelfAssign(dst, src ir.Node) bool {
- // Detect the following special case.
- //
- // func (b *Buffer) Foo() {
- // n, m := ...
- // b.buf = b.buf[n:m]
- // }
- //
- // This assignment is a no-op for escape analysis,
- // it does not store any new pointers into b that were not already there.
- // However, without this special case b will escape, because we assign to OIND/ODOTPTR.
- // Here we assume that the statement will not contain calls,
- // that is, that order will move any calls to init.
- // Otherwise base ONAME value could change between the moments
- // when we evaluate it for dst and for src.
-
- // dst is ONAME dereference.
- var dstX ir.Node
- switch dst.Op() {
- default:
- return false
- case ir.ODEREF:
- dst := dst.(*ir.StarExpr)
- dstX = dst.X
- case ir.ODOTPTR:
- dst := dst.(*ir.SelectorExpr)
- dstX = dst.X
- }
- if dstX.Op() != ir.ONAME {
- return false
- }
- // src is a slice operation.
- switch src.Op() {
- case ir.OSLICE, ir.OSLICE3, ir.OSLICESTR:
- // OK.
- case ir.OSLICEARR, ir.OSLICE3ARR:
- // Since arrays are embedded into containing object,
- // slice of non-pointer array will introduce a new pointer into b that was not already there
- // (pointer to b itself). After such assignment, if b contents escape,
- // b escapes as well. If we ignore such OSLICEARR, we will conclude
- // that b does not escape when b contents do.
- //
- // Pointer to an array is OK since it's not stored inside b directly.
- // For slicing an array (not pointer to array), there is an implicit OADDR.
- // We check that to determine non-pointer array slicing.
- src := src.(*ir.SliceExpr)
- if src.X.Op() == ir.OADDR {
- return false
- }
- default:
- return false
- }
- // slice is applied to ONAME dereference.
- var baseX ir.Node
- switch base := src.(*ir.SliceExpr).X; base.Op() {
- default:
- return false
- case ir.ODEREF:
- base := base.(*ir.StarExpr)
- baseX = base.X
- case ir.ODOTPTR:
- base := base.(*ir.SelectorExpr)
- baseX = base.X
- }
- if baseX.Op() != ir.ONAME {
- return false
- }
- // dst and src reference the same base ONAME.
- return dstX.(*ir.Name) == baseX.(*ir.Name)
-}
-
-// isSelfAssign reports whether assignment from src to dst can
-// be ignored by the escape analysis as it's effectively a self-assignment.
-func isSelfAssign(dst, src ir.Node) bool {
- if isSliceSelfAssign(dst, src) {
- return true
- }
-
- // Detect trivial assignments that assign back to the same object.
- //
- // It covers these cases:
- // val.x = val.y
- // val.x[i] = val.y[j]
- // val.x1.x2 = val.x1.y2
- // ... etc
- //
- // These assignments do not change assigned object lifetime.
-
- if dst == nil || src == nil || dst.Op() != src.Op() {
- return false
- }
-
- // The expression prefix must be both "safe" and identical.
- switch dst.Op() {
- case ir.ODOT, ir.ODOTPTR:
- // Safe trailing accessors that are permitted to differ.
- dst := dst.(*ir.SelectorExpr)
- src := src.(*ir.SelectorExpr)
- return ir.SameSafeExpr(dst.X, src.X)
- case ir.OINDEX:
- dst := dst.(*ir.IndexExpr)
- src := src.(*ir.IndexExpr)
- if mayAffectMemory(dst.Index) || mayAffectMemory(src.Index) {
- return false
- }
- return ir.SameSafeExpr(dst.X, src.X)
- default:
- return false
- }
-}
-
-// mayAffectMemory reports whether evaluation of n may affect the program's
-// memory state. If the expression can't affect memory state, then it can be
-// safely ignored by the escape analysis.
-func mayAffectMemory(n ir.Node) bool {
- // We may want to use a list of "memory safe" ops instead of generally
- // "side-effect free", which would include all calls and other ops that can
- // allocate or change global state. For now, it's safer to start with the latter.
- //
- // We're ignoring things like division by zero, index out of range,
- // and nil pointer dereference here.
-
- // TODO(rsc): It seems like it should be possible to replace this with
- // an ir.Any looking for any op that's not the ones in the case statement.
- // But that produces changes in the compiled output detected by buildall.
- switch n.Op() {
- case ir.ONAME, ir.OLITERAL, ir.ONIL:
- return false
-
- case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.OLSH, ir.ORSH, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OMOD:
- n := n.(*ir.BinaryExpr)
- return mayAffectMemory(n.X) || mayAffectMemory(n.Y)
-
- case ir.OINDEX:
- n := n.(*ir.IndexExpr)
- return mayAffectMemory(n.X) || mayAffectMemory(n.Index)
-
- case ir.OCONVNOP, ir.OCONV:
- n := n.(*ir.ConvExpr)
- return mayAffectMemory(n.X)
-
- case ir.OLEN, ir.OCAP, ir.ONOT, ir.OBITNOT, ir.OPLUS, ir.ONEG, ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
- n := n.(*ir.UnaryExpr)
- return mayAffectMemory(n.X)
-
- case ir.ODOT, ir.ODOTPTR:
- n := n.(*ir.SelectorExpr)
- return mayAffectMemory(n.X)
-
- case ir.ODEREF:
- n := n.(*ir.StarExpr)
- return mayAffectMemory(n.X)
-
- default:
- return true
- }
-}
-
-// HeapAllocReason returns the reason the given Node must be heap
-// allocated, or the empty string if it doesn't.
-func HeapAllocReason(n ir.Node) string {
- if n == nil || n.Type() == nil {
- return ""
- }
-
- // Parameters are always passed via the stack.
- if n.Op() == ir.ONAME {
- n := n.(*ir.Name)
- if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
- return ""
- }
- }
-
- if n.Type().Width > ir.MaxStackVarSize {
- return "too large for stack"
- }
-
- if (n.Op() == ir.ONEW || n.Op() == ir.OPTRLIT) && n.Type().Elem().Width > ir.MaxImplicitStackVarSize {
- return "too large for stack"
- }
-
- if n.Op() == ir.OCLOSURE && typecheck.ClosureType(n.(*ir.ClosureExpr)).Size() > ir.MaxImplicitStackVarSize {
- return "too large for stack"
- }
- if n.Op() == ir.OCALLPART && typecheck.PartialCallType(n.(*ir.SelectorExpr)).Size() > ir.MaxImplicitStackVarSize {
- return "too large for stack"
- }
-
- if n.Op() == ir.OMAKESLICE {
- n := n.(*ir.MakeExpr)
- r := n.Cap
- if r == nil {
- r = n.Len
- }
- if !ir.IsSmallIntConst(r) {
- return "non-constant size"
- }
- if t := n.Type(); t.Elem().Width != 0 && ir.Int64Val(r) > ir.MaxImplicitStackVarSize/t.Elem().Width {
- return "too large for stack"
- }
- }
-
- return ""
-}
-
-// This special tag is applied to uintptr variables
-// that we believe may hold unsafe.Pointers for
-// calls into assembly functions.
-const UnsafeUintptrNote = "unsafe-uintptr"
-
-// This special tag is applied to uintptr parameters of functions
-// marked go:uintptrescapes.
-const UintptrEscapesNote = "uintptr-escapes"
-
func (b *batch) paramTag(fn *ir.Func, narg int, f *types.Field) string {
name := func() string {
if f.Sym != nil {
@@ -2058,6 +371,11 @@ func (b *batch) paramTag(fn *ir.Func, narg int, f *types.Field) string {
return fmt.Sprintf("arg#%d", narg)
}
+ // Only report diagnostics for user code;
+ // not for wrappers generated around them.
+ // TODO(mdempsky): Generalize this.
+ diagnose := base.Flag.LowerM != 0 && !(fn.Wrapper() || fn.Dupok())
+
if len(fn.Body) == 0 {
// Assume that uintptr arguments must be held live across the call.
// This is most important for syscall.Syscall.
@@ -2065,11 +383,13 @@ func (b *batch) paramTag(fn *ir.Func, narg int, f *types.Field) string {
// This really doesn't have much to do with escape analysis per se,
// but we are reusing the ability to annotate an individual function
// argument and pass those annotations along to importing code.
+ fn.Pragma |= ir.UintptrKeepAlive
+
if f.Type.IsUintptr() {
- if base.Flag.LowerM != 0 {
+ if diagnose {
base.WarnfAt(f.Pos, "assuming %v is unsafe uintptr", name())
}
- return UnsafeUintptrNote
+ return ""
}
if !f.Type.HasPointers() { // don't bother tagging for scalars
@@ -2081,11 +401,11 @@ func (b *batch) paramTag(fn *ir.Func, narg int, f *types.Field) string {
// External functions are assumed unsafe, unless
// //go:noescape is given before the declaration.
if fn.Pragma&ir.Noescape != 0 {
- if base.Flag.LowerM != 0 && f.Sym != nil {
+ if diagnose && f.Sym != nil {
base.WarnfAt(f.Pos, "%v does not escape", name())
}
} else {
- if base.Flag.LowerM != 0 && f.Sym != nil {
+ if diagnose && f.Sym != nil {
base.WarnfAt(f.Pos, "leaking param: %v", name())
}
esc.AddHeap(0)
@@ -2095,18 +415,20 @@ func (b *batch) paramTag(fn *ir.Func, narg int, f *types.Field) string {
}
if fn.Pragma&ir.UintptrEscapes != 0 {
+ fn.Pragma |= ir.UintptrKeepAlive
+
if f.Type.IsUintptr() {
- if base.Flag.LowerM != 0 {
+ if diagnose {
base.WarnfAt(f.Pos, "marking %v as escaping uintptr", name())
}
- return UintptrEscapesNote
+ return ""
}
if f.IsDDD() && f.Type.Elem().IsUintptr() {
// final argument is ...uintptr.
- if base.Flag.LowerM != 0 {
+ if diagnose {
base.WarnfAt(f.Pos, "marking %v as escaping ...uintptr", name())
}
- return UintptrEscapesNote
+ return ""
}
}
@@ -2125,7 +447,7 @@ func (b *batch) paramTag(fn *ir.Func, narg int, f *types.Field) string {
esc := loc.paramEsc
esc.Optimize()
- if base.Flag.LowerM != 0 && !loc.escapes {
+ if diagnose && !loc.escapes {
if esc.Empty() {
base.WarnfAt(f.Pos, "%v does not escape", name())
}
diff --git a/src/cmd/compile/internal/escape/expr.go b/src/cmd/compile/internal/escape/expr.go
new file mode 100644
index 0000000000..62afb5b928
--- /dev/null
+++ b/src/cmd/compile/internal/escape/expr.go
@@ -0,0 +1,335 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package escape
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+)
+
+// expr models evaluating an expression n and flowing the result into
+// hole k.
+func (e *escape) expr(k hole, n ir.Node) {
+ if n == nil {
+ return
+ }
+ e.stmts(n.Init())
+ e.exprSkipInit(k, n)
+}
+
+func (e *escape) exprSkipInit(k hole, n ir.Node) {
+ if n == nil {
+ return
+ }
+
+ lno := ir.SetPos(n)
+ defer func() {
+ base.Pos = lno
+ }()
+
+ if k.derefs >= 0 && !n.Type().HasPointers() {
+ k.dst = &e.blankLoc
+ }
+
+ switch n.Op() {
+ default:
+ base.Fatalf("unexpected expr: %s %v", n.Op().String(), n)
+
+ case ir.OLITERAL, ir.ONIL, ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP, ir.OTYPE, ir.OMETHEXPR, ir.OLINKSYMOFFSET:
+ // nop
+
+ case ir.ONAME:
+ n := n.(*ir.Name)
+ if n.Class == ir.PFUNC || n.Class == ir.PEXTERN {
+ return
+ }
+ e.flow(k, e.oldLoc(n))
+
+ case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT:
+ n := n.(*ir.UnaryExpr)
+ e.discard(n.X)
+ case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.ODIV, ir.OMOD, ir.OLSH, ir.ORSH, ir.OAND, ir.OANDNOT, ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
+ n := n.(*ir.BinaryExpr)
+ e.discard(n.X)
+ e.discard(n.Y)
+ case ir.OANDAND, ir.OOROR:
+ n := n.(*ir.LogicalExpr)
+ e.discard(n.X)
+ e.discard(n.Y)
+ case ir.OADDR:
+ n := n.(*ir.AddrExpr)
+ e.expr(k.addr(n, "address-of"), n.X) // "address-of"
+ case ir.ODEREF:
+ n := n.(*ir.StarExpr)
+ e.expr(k.deref(n, "indirection"), n.X) // "indirection"
+ case ir.ODOT, ir.ODOTMETH, ir.ODOTINTER:
+ n := n.(*ir.SelectorExpr)
+ e.expr(k.note(n, "dot"), n.X)
+ case ir.ODOTPTR:
+ n := n.(*ir.SelectorExpr)
+ e.expr(k.deref(n, "dot of pointer"), n.X) // "dot of pointer"
+ case ir.ODOTTYPE, ir.ODOTTYPE2:
+ n := n.(*ir.TypeAssertExpr)
+ e.expr(k.dotType(n.Type(), n, "dot"), n.X)
+ case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
+ n := n.(*ir.DynamicTypeAssertExpr)
+ e.expr(k.dotType(n.Type(), n, "dot"), n.X)
+ // n.T doesn't need to be tracked; it always points to read-only storage.
+ case ir.OINDEX:
+ n := n.(*ir.IndexExpr)
+ if n.X.Type().IsArray() {
+ e.expr(k.note(n, "fixed-array-index-of"), n.X)
+ } else {
+ // TODO(mdempsky): Fix the "why" reason text.
+ e.expr(k.deref(n, "dot of pointer"), n.X)
+ }
+ e.discard(n.Index)
+ case ir.OINDEXMAP:
+ n := n.(*ir.IndexExpr)
+ e.discard(n.X)
+ e.discard(n.Index)
+ case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR:
+ n := n.(*ir.SliceExpr)
+ e.expr(k.note(n, "slice"), n.X)
+ e.discard(n.Low)
+ e.discard(n.High)
+ e.discard(n.Max)
+
+ case ir.OCONV, ir.OCONVNOP:
+ n := n.(*ir.ConvExpr)
+ if ir.ShouldCheckPtr(e.curfn, 2) && n.Type().IsUnsafePtr() && n.X.Type().IsPtr() {
+ // When -d=checkptr=2 is enabled, treat
+ // conversions to unsafe.Pointer as an
+ // escaping operation. This allows better
+ // runtime instrumentation, since we can more
+ // easily detect object boundaries on the heap
+ // than the stack.
+ e.assignHeap(n.X, "conversion to unsafe.Pointer", n)
+ } else if n.Type().IsUnsafePtr() && n.X.Type().IsUintptr() {
+ e.unsafeValue(k, n.X)
+ } else {
+ e.expr(k, n.X)
+ }
+ case ir.OCONVIFACE, ir.OCONVIDATA:
+ n := n.(*ir.ConvExpr)
+ if !n.X.Type().IsInterface() && !types.IsDirectIface(n.X.Type()) {
+ k = e.spill(k, n)
+ }
+ e.expr(k.note(n, "interface-converted"), n.X)
+ case ir.OEFACE:
+ n := n.(*ir.BinaryExpr)
+ // Note: n.X is not needed because it can never point to memory that might escape.
+ e.expr(k, n.Y)
+ case ir.OIDATA, ir.OSPTR:
+ n := n.(*ir.UnaryExpr)
+ e.expr(k, n.X)
+ case ir.OSLICE2ARRPTR:
+ // the slice pointer flows directly to the result
+ n := n.(*ir.ConvExpr)
+ e.expr(k, n.X)
+ case ir.ORECV:
+ n := n.(*ir.UnaryExpr)
+ e.discard(n.X)
+
+ case ir.OCALLMETH, ir.OCALLFUNC, ir.OCALLINTER, ir.OINLCALL, ir.OLEN, ir.OCAP, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCOPY, ir.ORECOVER, ir.OUNSAFEADD, ir.OUNSAFESLICE:
+ e.call([]hole{k}, n)
+
+ case ir.ONEW:
+ n := n.(*ir.UnaryExpr)
+ e.spill(k, n)
+
+ case ir.OMAKESLICE:
+ n := n.(*ir.MakeExpr)
+ e.spill(k, n)
+ e.discard(n.Len)
+ e.discard(n.Cap)
+ case ir.OMAKECHAN:
+ n := n.(*ir.MakeExpr)
+ e.discard(n.Len)
+ case ir.OMAKEMAP:
+ n := n.(*ir.MakeExpr)
+ e.spill(k, n)
+ e.discard(n.Len)
+
+ case ir.OMETHVALUE:
+ // Flow the receiver argument to both the closure and
+ // to the receiver parameter.
+
+ n := n.(*ir.SelectorExpr)
+ closureK := e.spill(k, n)
+
+ m := n.Selection
+
+ // We don't know how the method value will be called
+ // later, so conservatively assume the result
+ // parameters all flow to the heap.
+ //
+ // TODO(mdempsky): Change ks into a callback, so that
+ // we don't have to create this slice?
+ var ks []hole
+ for i := m.Type.NumResults(); i > 0; i-- {
+ ks = append(ks, e.heapHole())
+ }
+ name, _ := m.Nname.(*ir.Name)
+ paramK := e.tagHole(ks, name, m.Type.Recv())
+
+ e.expr(e.teeHole(paramK, closureK), n.X)
+
+ case ir.OPTRLIT:
+ n := n.(*ir.AddrExpr)
+ e.expr(e.spill(k, n), n.X)
+
+ case ir.OARRAYLIT:
+ n := n.(*ir.CompLitExpr)
+ for _, elt := range n.List {
+ if elt.Op() == ir.OKEY {
+ elt = elt.(*ir.KeyExpr).Value
+ }
+ e.expr(k.note(n, "array literal element"), elt)
+ }
+
+ case ir.OSLICELIT:
+ n := n.(*ir.CompLitExpr)
+ k = e.spill(k, n)
+
+ for _, elt := range n.List {
+ if elt.Op() == ir.OKEY {
+ elt = elt.(*ir.KeyExpr).Value
+ }
+ e.expr(k.note(n, "slice-literal-element"), elt)
+ }
+
+ case ir.OSTRUCTLIT:
+ n := n.(*ir.CompLitExpr)
+ for _, elt := range n.List {
+ e.expr(k.note(n, "struct literal element"), elt.(*ir.StructKeyExpr).Value)
+ }
+
+ case ir.OMAPLIT:
+ n := n.(*ir.CompLitExpr)
+ e.spill(k, n)
+
+ // Map keys and values are always stored in the heap.
+ for _, elt := range n.List {
+ elt := elt.(*ir.KeyExpr)
+ e.assignHeap(elt.Key, "map literal key", n)
+ e.assignHeap(elt.Value, "map literal value", n)
+ }
+
+ case ir.OCLOSURE:
+ n := n.(*ir.ClosureExpr)
+ k = e.spill(k, n)
+ e.closures = append(e.closures, closure{k, n})
+
+ if fn := n.Func; fn.IsHiddenClosure() {
+ for _, cv := range fn.ClosureVars {
+ if loc := e.oldLoc(cv); !loc.captured {
+ loc.captured = true
+
+ // Ignore reassignments to the variable in straightline code
+ // preceding the first capture by a closure.
+ if loc.loopDepth == e.loopDepth {
+ loc.reassigned = false
+ }
+ }
+ }
+
+ for _, n := range fn.Dcl {
+ // Add locations for local variables of the
+ // closure, if needed, in case we're not including
+ // the closure func in the batch for escape
+ // analysis (happens for escape analysis called
+ // from reflectdata.methodWrapper)
+ if n.Op() == ir.ONAME && n.Opt == nil {
+ e.with(fn).newLoc(n, false)
+ }
+ }
+ e.walkFunc(fn)
+ }
+
+ case ir.ORUNES2STR, ir.OBYTES2STR, ir.OSTR2RUNES, ir.OSTR2BYTES, ir.ORUNESTR:
+ n := n.(*ir.ConvExpr)
+ e.spill(k, n)
+ e.discard(n.X)
+
+ case ir.OADDSTR:
+ n := n.(*ir.AddStringExpr)
+ e.spill(k, n)
+
+ // Arguments of OADDSTR never escape;
+ // runtime.concatstrings makes sure of that.
+ e.discards(n.List)
+
+ case ir.ODYNAMICTYPE:
+ // Nothing to do - argument is a *runtime._type (+ maybe a *runtime.itab) pointing to static data section
+ }
+}
+
+// unsafeValue evaluates a uintptr-typed arithmetic expression looking
+// for conversions from an unsafe.Pointer.
+func (e *escape) unsafeValue(k hole, n ir.Node) {
+ if n.Type().Kind() != types.TUINTPTR {
+ base.Fatalf("unexpected type %v for %v", n.Type(), n)
+ }
+ if k.addrtaken {
+ base.Fatalf("unexpected addrtaken")
+ }
+
+ e.stmts(n.Init())
+
+ switch n.Op() {
+ case ir.OCONV, ir.OCONVNOP:
+ n := n.(*ir.ConvExpr)
+ if n.X.Type().IsUnsafePtr() {
+ e.expr(k, n.X)
+ } else {
+ e.discard(n.X)
+ }
+ case ir.ODOTPTR:
+ n := n.(*ir.SelectorExpr)
+ if ir.IsReflectHeaderDataField(n) {
+ e.expr(k.deref(n, "reflect.Header.Data"), n.X)
+ } else {
+ e.discard(n.X)
+ }
+ case ir.OPLUS, ir.ONEG, ir.OBITNOT:
+ n := n.(*ir.UnaryExpr)
+ e.unsafeValue(k, n.X)
+ case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.ODIV, ir.OMOD, ir.OAND, ir.OANDNOT:
+ n := n.(*ir.BinaryExpr)
+ e.unsafeValue(k, n.X)
+ e.unsafeValue(k, n.Y)
+ case ir.OLSH, ir.ORSH:
+ n := n.(*ir.BinaryExpr)
+ e.unsafeValue(k, n.X)
+ // RHS need not be uintptr-typed (#32959) and can't meaningfully
+ // flow pointers anyway.
+ e.discard(n.Y)
+ default:
+ e.exprSkipInit(e.discardHole(), n)
+ }
+}
+
+// discard evaluates an expression n for side-effects, but discards
+// its value.
+func (e *escape) discard(n ir.Node) {
+ e.expr(e.discardHole(), n)
+}
+
+func (e *escape) discards(l ir.Nodes) {
+ for _, n := range l {
+ e.discard(n)
+ }
+}
+
+// spill allocates a new location associated with expression n, flows
+// its address to k, and returns a hole that flows values to it. It's
+// intended for use with most expressions that allocate storage.
+func (e *escape) spill(k hole, n ir.Node) hole {
+ loc := e.newLoc(n, true)
+ e.flow(k.addr(n, "spill"), loc)
+ return loc.asHole()
+}
diff --git a/src/cmd/compile/internal/escape/graph.go b/src/cmd/compile/internal/escape/graph.go
new file mode 100644
index 0000000000..cc3d078add
--- /dev/null
+++ b/src/cmd/compile/internal/escape/graph.go
@@ -0,0 +1,324 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package escape
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/logopt"
+ "cmd/compile/internal/types"
+ "fmt"
+)
+
+// Below we implement the methods for walking the AST and recording
+// data flow edges. Note that because a sub-expression might have
+// side-effects, it's important to always visit the entire AST.
+//
+// For example, write either:
+//
+// if x {
+// e.discard(n.Left)
+// } else {
+// e.value(k, n.Left)
+// }
+//
+// or
+//
+// if x {
+// k = e.discardHole()
+// }
+// e.value(k, n.Left)
+//
+// Do NOT write:
+//
+// // BAD: possibly loses side-effects within n.Left
+// if !x {
+// e.value(k, n.Left)
+// }
+
+// A location represents an abstract location that stores a Go
+// variable.
+type location struct {
+ n ir.Node // represented variable or expression, if any
+ curfn *ir.Func // enclosing function
+ edges []edge // incoming edges
+ loopDepth int // loopDepth at declaration
+
+ // resultIndex records the tuple index (starting at 1) for
+ // PPARAMOUT variables within their function's result type.
+ // For non-PPARAMOUT variables it's 0.
+ resultIndex int
+
+ // derefs and walkgen are used during walkOne to track the
+ // minimal dereferences from the walk root.
+ derefs int // >= -1
+ walkgen uint32
+
+ // dst and dstEdgeIdx track the next immediate assignment
+ // destination location during walkOne, along with the index
+ // of the edge pointing back to this location.
+ dst *location
+ dstEdgeIdx int
+
+ // queued is used by walkAll to track whether this location is
+ // in the walk queue.
+ queued bool
+
+ // escapes reports whether the represented variable's address
+ // escapes; that is, whether the variable must be heap
+ // allocated.
+ escapes bool
+
+ // transient reports whether the represented expression's
+ // address does not outlive the statement; that is, whether
+ // its storage can be immediately reused.
+ transient bool
+
+ // paramEsc records the represented parameter's leak set.
+ paramEsc leaks
+
+ captured bool // has a closure captured this variable?
+ reassigned bool // has this variable been reassigned?
+ addrtaken bool // has this variable's address been taken?
+}
+
+// An edge represents an assignment edge between two Go variables.
+type edge struct {
+ src *location
+ derefs int // >= -1
+ notes *note
+}
+
+func (l *location) asHole() hole {
+ return hole{dst: l}
+}
+
+// leakTo records that parameter l leaks to sink.
+func (l *location) leakTo(sink *location, derefs int) {
+ // If sink is a result parameter that doesn't escape (#44614)
+ // and we can fit return bits into the escape analysis tag,
+ // then record as a result leak.
+ if !sink.escapes && sink.isName(ir.PPARAMOUT) && sink.curfn == l.curfn {
+ ri := sink.resultIndex - 1
+ if ri < numEscResults {
+ // Leak to result parameter.
+ l.paramEsc.AddResult(ri, derefs)
+ return
+ }
+ }
+
+ // Otherwise, record as heap leak.
+ l.paramEsc.AddHeap(derefs)
+}
+
+func (l *location) isName(c ir.Class) bool {
+ return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class == c
+}
+
+// A hole represents a context for evaluation of a Go
+// expression. E.g., when evaluating p in "x = **p", we'd have a hole
+// with dst==x and derefs==2.
+type hole struct {
+ dst *location
+ derefs int // >= -1
+ notes *note
+
+ // addrtaken indicates whether this context is taking the address of
+ // the expression, independent of whether the address will actually
+ // be stored into a variable.
+ addrtaken bool
+}
+
+type note struct {
+ next *note
+ where ir.Node
+ why string
+}
+
+func (k hole) note(where ir.Node, why string) hole {
+ if where == nil || why == "" {
+ base.Fatalf("note: missing where/why")
+ }
+ if base.Flag.LowerM >= 2 || logopt.Enabled() {
+ k.notes = &note{
+ next: k.notes,
+ where: where,
+ why: why,
+ }
+ }
+ return k
+}
+
+func (k hole) shift(delta int) hole {
+ k.derefs += delta
+ if k.derefs < -1 {
+ base.Fatalf("derefs underflow: %v", k.derefs)
+ }
+ k.addrtaken = delta < 0
+ return k
+}
+
+func (k hole) deref(where ir.Node, why string) hole { return k.shift(1).note(where, why) }
+func (k hole) addr(where ir.Node, why string) hole { return k.shift(-1).note(where, why) }
+
+func (k hole) dotType(t *types.Type, where ir.Node, why string) hole {
+ if !t.IsInterface() && !types.IsDirectIface(t) {
+ k = k.shift(1)
+ }
+ return k.note(where, why)
+}
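For intuition about the deref/addr bookkeeping above, a minimal standalone sketch that mirrors only hole.shift's derefs arithmetic (the toyHole type and the example expressions are illustrative, not the compiler's own API):

package main

import "fmt"

// toyHole mirrors the derefs/addrtaken bookkeeping of hole.shift: each
// dereference adds one level, taking an address subtracts one, and the
// count may never drop below -1.
type toyHole struct {
	derefs    int
	addrtaken bool
}

func (k toyHole) shift(delta int) toyHole {
	k.derefs += delta
	if k.derefs < -1 {
		panic("derefs underflow")
	}
	k.addrtaken = delta < 0
	return k
}

func main() {
	var k toyHole // evaluating into "x = _" starts at derefs == 0

	// Evaluating p in "x = **p": two dereferences on the way to x.
	fmt.Println(k.shift(1).shift(1).derefs) // 2

	// Evaluating q in "x = &q": q's address is taken.
	fmt.Println(k.shift(-1)) // {-1 true}
}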
+
+func (b *batch) flow(k hole, src *location) {
+ if k.addrtaken {
+ src.addrtaken = true
+ }
+
+ dst := k.dst
+ if dst == &b.blankLoc {
+ return
+ }
+ if dst == src && k.derefs >= 0 { // dst = dst, dst = *dst, ...
+ return
+ }
+ if dst.escapes && k.derefs < 0 { // dst = &src
+ if base.Flag.LowerM >= 2 || logopt.Enabled() {
+ pos := base.FmtPos(src.n.Pos())
+ if base.Flag.LowerM >= 2 {
+ fmt.Printf("%s: %v escapes to heap:\n", pos, src.n)
+ }
+ explanation := b.explainFlow(pos, dst, src, k.derefs, k.notes, []*logopt.LoggedOpt{})
+ if logopt.Enabled() {
+ var e_curfn *ir.Func // TODO(mdempsky): Fix.
+ logopt.LogOpt(src.n.Pos(), "escapes", "escape", ir.FuncName(e_curfn), fmt.Sprintf("%v escapes to heap", src.n), explanation)
+ }
+
+ }
+ src.escapes = true
+ return
+ }
+
+ // TODO(mdempsky): Deduplicate edges?
+ dst.edges = append(dst.edges, edge{src: src, derefs: k.derefs, notes: k.notes})
+}
+
+func (b *batch) heapHole() hole { return b.heapLoc.asHole() }
+func (b *batch) discardHole() hole { return b.blankLoc.asHole() }
+
+func (b *batch) oldLoc(n *ir.Name) *location {
+ if n.Canonical().Opt == nil {
+ base.Fatalf("%v has no location", n)
+ }
+ return n.Canonical().Opt.(*location)
+}
+
+func (e *escape) newLoc(n ir.Node, transient bool) *location {
+ if e.curfn == nil {
+ base.Fatalf("e.curfn isn't set")
+ }
+ if n != nil && n.Type() != nil && n.Type().NotInHeap() {
+ base.ErrorfAt(n.Pos(), "%v is incomplete (or unallocatable); stack allocation disallowed", n.Type())
+ }
+
+ if n != nil && n.Op() == ir.ONAME {
+ if canon := n.(*ir.Name).Canonical(); n != canon {
+ base.Fatalf("newLoc on non-canonical %v (canonical is %v)", n, canon)
+ }
+ }
+ loc := &location{
+ n: n,
+ curfn: e.curfn,
+ loopDepth: e.loopDepth,
+ transient: transient,
+ }
+ e.allLocs = append(e.allLocs, loc)
+ if n != nil {
+ if n.Op() == ir.ONAME {
+ n := n.(*ir.Name)
+ if n.Class == ir.PPARAM && n.Curfn == nil {
+ // ok; hidden parameter
+ } else if n.Curfn != e.curfn {
+ base.Fatalf("curfn mismatch: %v != %v for %v", n.Curfn, e.curfn, n)
+ }
+
+ if n.Opt != nil {
+ base.Fatalf("%v already has a location", n)
+ }
+ n.Opt = loc
+ }
+ }
+ return loc
+}
+
+// teeHole returns a new hole that flows into each hole of ks,
+// similar to the Unix tee(1) command.
+func (e *escape) teeHole(ks ...hole) hole {
+ if len(ks) == 0 {
+ return e.discardHole()
+ }
+ if len(ks) == 1 {
+ return ks[0]
+ }
+ // TODO(mdempsky): Optimize if there's only one non-discard hole?
+
+ // Given holes "l1 = _", "l2 = **_", "l3 = *_", ..., create a
+ // new temporary location ltmp, wire it into place, and return
+ // a hole for "ltmp = _".
+ loc := e.newLoc(nil, true)
+ for _, k := range ks {
+ // N.B., "p = &q" and "p = &tmp; tmp = q" are not
+ // semantically equivalent. To combine holes like "l1
+ // = _" and "l2 = &_", we'd need to wire them as "l1 =
+ // *ltmp" and "l2 = ltmp" and return "ltmp = &_"
+ // instead.
+ if k.derefs < 0 {
+ base.Fatalf("teeHole: negative derefs")
+ }
+
+ e.flow(k, loc)
+ }
+ return loc.asHole()
+}
+
+// later returns a new hole that flows into k, but some time later.
+// Its main effect is to prevent immediate reuse of temporary
+// variables introduced during Order.
+func (e *escape) later(k hole) hole {
+ loc := e.newLoc(nil, false)
+ e.flow(k, loc)
+ return loc.asHole()
+}
+
+// Fmt is called from node printing to print information about escape analysis results.
+func Fmt(n ir.Node) string {
+ text := ""
+ switch n.Esc() {
+ case ir.EscUnknown:
+ break
+
+ case ir.EscHeap:
+ text = "esc(h)"
+
+ case ir.EscNone:
+ text = "esc(no)"
+
+ case ir.EscNever:
+ text = "esc(N)"
+
+ default:
+ text = fmt.Sprintf("esc(%d)", n.Esc())
+ }
+
+ if n.Op() == ir.ONAME {
+ n := n.(*ir.Name)
+ if loc, ok := n.Opt.(*location); ok && loc.loopDepth != 0 {
+ if text != "" {
+ text += " "
+ }
+ text += fmt.Sprintf("ld(%d)", loc.loopDepth)
+ }
+ }
+
+ return text
+}
diff --git a/src/cmd/compile/internal/escape/leaks.go b/src/cmd/compile/internal/escape/leaks.go
new file mode 100644
index 0000000000..4c848a5ee7
--- /dev/null
+++ b/src/cmd/compile/internal/escape/leaks.go
@@ -0,0 +1,106 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package escape
+
+import (
+ "cmd/compile/internal/base"
+ "math"
+ "strings"
+)
+
+const numEscResults = 7
+
+// A leaks represents a set of assignment flows from a parameter
+// to the heap or to any of its function's (first numEscResults)
+// result parameters.
+type leaks [1 + numEscResults]uint8
+
+// Empty reports whether l is an empty set (i.e., no assignment flows).
+func (l leaks) Empty() bool { return l == leaks{} }
+
+// Heap returns the minimum deref count of any assignment flow from l
+// to the heap. If no such flows exist, Heap returns -1.
+func (l leaks) Heap() int { return l.get(0) }
+
+// Result returns the minimum deref count of any assignment flow from
+// l to its function's i'th result parameter. If no such flows exist,
+// Result returns -1.
+func (l leaks) Result(i int) int { return l.get(1 + i) }
+
+// AddHeap adds an assignment flow from l to the heap.
+func (l *leaks) AddHeap(derefs int) { l.add(0, derefs) }
+
+// AddResult adds an assignment flow from l to its function's i'th
+// result parameter.
+func (l *leaks) AddResult(i, derefs int) { l.add(1+i, derefs) }
+
+func (l *leaks) setResult(i, derefs int) { l.set(1+i, derefs) }
+
+func (l leaks) get(i int) int { return int(l[i]) - 1 }
+
+func (l *leaks) add(i, derefs int) {
+ if old := l.get(i); old < 0 || derefs < old {
+ l.set(i, derefs)
+ }
+}
+
+func (l *leaks) set(i, derefs int) {
+ v := derefs + 1
+ if v < 0 {
+ base.Fatalf("invalid derefs count: %v", derefs)
+ }
+ if v > math.MaxUint8 {
+ v = math.MaxUint8
+ }
+
+ l[i] = uint8(v)
+}
+
+// Optimize removes result flow paths that are equal in length or
+// longer than the shortest heap flow path.
+func (l *leaks) Optimize() {
+ // If we have a path to the heap, then there's no use in
+ // keeping equal or longer paths elsewhere.
+ if x := l.Heap(); x >= 0 {
+ for i := 0; i < numEscResults; i++ {
+ if l.Result(i) >= x {
+ l.setResult(i, -1)
+ }
+ }
+ }
+}
+
+var leakTagCache = map[leaks]string{}
+
+// Encode converts l into a binary string for export data.
+func (l leaks) Encode() string {
+ if l.Heap() == 0 {
+ // Space optimization: empty string encodes more
+ // efficiently in export data.
+ return ""
+ }
+ if s, ok := leakTagCache[l]; ok {
+ return s
+ }
+
+ n := len(l)
+ for n > 0 && l[n-1] == 0 {
+ n--
+ }
+ s := "esc:" + string(l[:n])
+ leakTagCache[l] = s
+ return s
+}
+
+// parseLeaks parses a binary string representing a leaks set.
+func parseLeaks(s string) leaks {
+ var l leaks
+ if !strings.HasPrefix(s, "esc:") {
+ l.AddHeap(0)
+ return l
+ }
+ copy(l[:], s[4:])
+ return l
+}
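As a rough illustration of the byte layout used by leaks, the following standalone sketch mirrors the add/get arithmetic (index 0 for the heap, 1..numEscResults for results, each byte storing derefs+1); the toyLeaks name is illustrative and the MaxUint8 clamping is omitted:

package main

import "fmt"

const numEscResults = 7

// toyLeaks mirrors the encoding above: l[0] tracks flow to the heap,
// l[1+i] tracks flow to result parameter i, and each byte stores
// derefs+1 so that 0 means "no flow".
type toyLeaks [1 + numEscResults]uint8

// add records a flow with the given dereference count, keeping the minimum.
func (l *toyLeaks) add(i, derefs int) {
	if old := int(l[i]) - 1; old < 0 || derefs < old {
		l[i] = uint8(derefs + 1)
	}
}

func main() {
	var l toyLeaks
	l.add(0, 2) // leaks to the heap through two dereferences
	l.add(1, 0) // leaks directly to result parameter 0
	fmt.Println(l) // [3 1 0 0 0 0 0 0]
}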
diff --git a/src/cmd/compile/internal/escape/solve.go b/src/cmd/compile/internal/escape/solve.go
new file mode 100644
index 0000000000..77d6b27dd7
--- /dev/null
+++ b/src/cmd/compile/internal/escape/solve.go
@@ -0,0 +1,289 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package escape
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/logopt"
+ "cmd/internal/src"
+ "fmt"
+ "strings"
+)
+
+// walkAll computes the minimal dereferences between all pairs of
+// locations.
+func (b *batch) walkAll() {
+ // We use a work queue to keep track of locations that we need
+ // to visit, and repeatedly walk until we reach a fixed point.
+ //
+ // We walk once from each location (including the heap), and
+ // then re-enqueue each location on its transition from
+ // transient->!transient and !escapes->escapes, which can each
+ // happen at most once. So we take Θ(len(e.allLocs)) walks.
+
+ // LIFO queue, has enough room for e.allLocs and e.heapLoc.
+ todo := make([]*location, 0, len(b.allLocs)+1)
+ enqueue := func(loc *location) {
+ if !loc.queued {
+ todo = append(todo, loc)
+ loc.queued = true
+ }
+ }
+
+ for _, loc := range b.allLocs {
+ enqueue(loc)
+ }
+ enqueue(&b.heapLoc)
+
+ var walkgen uint32
+ for len(todo) > 0 {
+ root := todo[len(todo)-1]
+ todo = todo[:len(todo)-1]
+ root.queued = false
+
+ walkgen++
+ b.walkOne(root, walkgen, enqueue)
+ }
+}
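A stripped-down sketch of the dedup'd LIFO work queue used above, with locations replaced by a toy item type carrying just the queued flag (purely illustrative):

package main

import "fmt"

// item stands in for *location: the queued flag keeps each entry in the
// queue at most once at a time, matching walkAll's enqueue helper.
type item struct {
	id     int
	queued bool
}

func main() {
	items := []*item{{id: 0}, {id: 1}, {id: 2}}

	var todo []*item
	enqueue := func(it *item) {
		if !it.queued {
			todo = append(todo, it)
			it.queued = true
		}
	}

	for _, it := range items {
		enqueue(it)
	}
	enqueue(items[1]) // no effect: already queued

	for len(todo) > 0 {
		it := todo[len(todo)-1]
		todo = todo[:len(todo)-1]
		it.queued = false
		fmt.Println("walk from", it.id) // prints 2, 1, 0 (LIFO order)
	}
}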
+
+// walkOne computes the minimal number of dereferences from root to
+// all other locations.
+func (b *batch) walkOne(root *location, walkgen uint32, enqueue func(*location)) {
+ // The data flow graph has negative edges (from addressing
+ // operations), so we use the Bellman-Ford algorithm. However,
+ // we don't have to worry about infinite negative cycles since
+ // we bound intermediate dereference counts to 0.
+
+ root.walkgen = walkgen
+ root.derefs = 0
+ root.dst = nil
+
+ todo := []*location{root} // LIFO queue
+ for len(todo) > 0 {
+ l := todo[len(todo)-1]
+ todo = todo[:len(todo)-1]
+
+ derefs := l.derefs
+
+ // If l.derefs < 0, then l's address flows to root.
+ addressOf := derefs < 0
+ if addressOf {
+ // For a flow path like "root = &l; l = x",
+ // l's address flows to root, but x's does
+ // not. We recognize this by lower bounding
+ // derefs at 0.
+ derefs = 0
+
+ // If l's address flows to a non-transient
+ // location, then l can't be transiently
+ // allocated.
+ if !root.transient && l.transient {
+ l.transient = false
+ enqueue(l)
+ }
+ }
+
+ if b.outlives(root, l) {
+ // l's value flows to root. If l is a function
+ // parameter and root is the heap or a
+ // corresponding result parameter, then record
+ // that value flow for tagging the function
+ // later.
+ if l.isName(ir.PPARAM) {
+ if (logopt.Enabled() || base.Flag.LowerM >= 2) && !l.escapes {
+ if base.Flag.LowerM >= 2 {
+ fmt.Printf("%s: parameter %v leaks to %s with derefs=%d:\n", base.FmtPos(l.n.Pos()), l.n, b.explainLoc(root), derefs)
+ }
+ explanation := b.explainPath(root, l)
+ if logopt.Enabled() {
+ var e_curfn *ir.Func // TODO(mdempsky): Fix.
+ logopt.LogOpt(l.n.Pos(), "leak", "escape", ir.FuncName(e_curfn),
+ fmt.Sprintf("parameter %v leaks to %s with derefs=%d", l.n, b.explainLoc(root), derefs), explanation)
+ }
+ }
+ l.leakTo(root, derefs)
+ }
+
+ // If l's address flows somewhere that
+ // outlives it, then l needs to be heap
+ // allocated.
+ if addressOf && !l.escapes {
+ if logopt.Enabled() || base.Flag.LowerM >= 2 {
+ if base.Flag.LowerM >= 2 {
+ fmt.Printf("%s: %v escapes to heap:\n", base.FmtPos(l.n.Pos()), l.n)
+ }
+ explanation := b.explainPath(root, l)
+ if logopt.Enabled() {
+ var e_curfn *ir.Func // TODO(mdempsky): Fix.
+ logopt.LogOpt(l.n.Pos(), "escape", "escape", ir.FuncName(e_curfn), fmt.Sprintf("%v escapes to heap", l.n), explanation)
+ }
+ }
+ l.escapes = true
+ enqueue(l)
+ continue
+ }
+ }
+
+ for i, edge := range l.edges {
+ if edge.src.escapes {
+ continue
+ }
+ d := derefs + edge.derefs
+ if edge.src.walkgen != walkgen || edge.src.derefs > d {
+ edge.src.walkgen = walkgen
+ edge.src.derefs = d
+ edge.src.dst = l
+ edge.src.dstEdgeIdx = i
+ todo = append(todo, edge.src)
+ }
+ }
+ }
+}
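The clamping of negative dereference counts is the subtle part of walkOne. The following self-contained sketch mirrors the relaxation loop on a toy integer-labelled graph (the minDerefs helper and the node numbering are illustrative):

package main

import "fmt"

// edge mirrors the flow edges above: derefs -1 means "address of",
// 0 a plain assignment, 1 a dereference, and so on.
type edge struct{ src, derefs int }

// minDerefs propagates the minimal dereference count from root to every
// reachable node, clamping intermediate counts at 0 so that an
// address-of edge cannot keep lowering distances along the path.
func minDerefs(edges map[int][]edge, root, n int) []int {
	const unseen = 1 << 30
	dist := make([]int, n)
	for i := range dist {
		dist[i] = unseen
	}
	dist[root] = 0

	todo := []int{root} // LIFO queue, as in walkOne
	for len(todo) > 0 {
		l := todo[len(todo)-1]
		todo = todo[:len(todo)-1]

		derefs := dist[l]
		if derefs < 0 {
			derefs = 0 // lower-bound at 0, as in walkOne
		}
		for _, e := range edges[l] {
			if d := derefs + e.derefs; d < dist[e.src] {
				dist[e.src] = d
				todo = append(todo, e.src)
			}
		}
	}
	return dist
}

func main() {
	// Flow path "0 = &1; 1 = 2": 1's address flows to 0, but 2's does not.
	edges := map[int][]edge{0: {{src: 1, derefs: -1}}, 1: {{src: 2, derefs: 0}}}
	fmt.Println(minDerefs(edges, 0, 3)) // [0 -1 0]
}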
+
+// explainPath prints an explanation of how src flows to the walk root.
+func (b *batch) explainPath(root, src *location) []*logopt.LoggedOpt {
+ visited := make(map[*location]bool)
+ pos := base.FmtPos(src.n.Pos())
+ var explanation []*logopt.LoggedOpt
+ for {
+ // Prevent infinite loop.
+ if visited[src] {
+ if base.Flag.LowerM >= 2 {
+ fmt.Printf("%s: warning: truncated explanation due to assignment cycle; see golang.org/issue/35518\n", pos)
+ }
+ break
+ }
+ visited[src] = true
+ dst := src.dst
+ edge := &dst.edges[src.dstEdgeIdx]
+ if edge.src != src {
+ base.Fatalf("path inconsistency: %v != %v", edge.src, src)
+ }
+
+ explanation = b.explainFlow(pos, dst, src, edge.derefs, edge.notes, explanation)
+
+ if dst == root {
+ break
+ }
+ src = dst
+ }
+
+ return explanation
+}
+
+func (b *batch) explainFlow(pos string, dst, srcloc *location, derefs int, notes *note, explanation []*logopt.LoggedOpt) []*logopt.LoggedOpt {
+ ops := "&"
+ if derefs >= 0 {
+ ops = strings.Repeat("*", derefs)
+ }
+ print := base.Flag.LowerM >= 2
+
+ flow := fmt.Sprintf(" flow: %s = %s%v:", b.explainLoc(dst), ops, b.explainLoc(srcloc))
+ if print {
+ fmt.Printf("%s:%s\n", pos, flow)
+ }
+ if logopt.Enabled() {
+ var epos src.XPos
+ if notes != nil {
+ epos = notes.where.Pos()
+ } else if srcloc != nil && srcloc.n != nil {
+ epos = srcloc.n.Pos()
+ }
+ var e_curfn *ir.Func // TODO(mdempsky): Fix.
+ explanation = append(explanation, logopt.NewLoggedOpt(epos, "escflow", "escape", ir.FuncName(e_curfn), flow))
+ }
+
+ for note := notes; note != nil; note = note.next {
+ if print {
+ fmt.Printf("%s: from %v (%v) at %s\n", pos, note.where, note.why, base.FmtPos(note.where.Pos()))
+ }
+ if logopt.Enabled() {
+ var e_curfn *ir.Func // TODO(mdempsky): Fix.
+ explanation = append(explanation, logopt.NewLoggedOpt(note.where.Pos(), "escflow", "escape", ir.FuncName(e_curfn),
+ fmt.Sprintf(" from %v (%v)", note.where, note.why)))
+ }
+ }
+ return explanation
+}
+
+func (b *batch) explainLoc(l *location) string {
+ if l == &b.heapLoc {
+ return "{heap}"
+ }
+ if l.n == nil {
+ // TODO(mdempsky): Omit entirely.
+ return "{temp}"
+ }
+ if l.n.Op() == ir.ONAME {
+ return fmt.Sprintf("%v", l.n)
+ }
+ return fmt.Sprintf("{storage for %v}", l.n)
+}
+
+// outlives reports whether values stored in l may survive beyond
+// other's lifetime if stack allocated.
+func (b *batch) outlives(l, other *location) bool {
+ // The heap outlives everything.
+ if l.escapes {
+ return true
+ }
+
+ // We don't know what callers do with returned values, so
+ // pessimistically we need to assume they flow to the heap and
+ // outlive everything too.
+ if l.isName(ir.PPARAMOUT) {
+ // Exception: Directly called closures can return
+ // locations allocated outside of them without forcing
+ // them to the heap. For example:
+ //
+ // var u int // okay to stack allocate
+ // *(func() *int { return &u }()) = 42
+ if containsClosure(other.curfn, l.curfn) && l.curfn.ClosureCalled() {
+ return false
+ }
+
+ return true
+ }
+
+ // If l and other are within the same function, then l
+ // outlives other if it was declared outside other's loop
+ // scope. For example:
+ //
+ // var l *int
+ // for {
+ // l = new(int)
+ // }
+ if l.curfn == other.curfn && l.loopDepth < other.loopDepth {
+ return true
+ }
+
+ // If other is declared within a child closure of where l is
+ // declared, then l outlives it. For example:
+ //
+ // var l *int
+ // func() {
+ // l = new(int)
+ // }
+ if containsClosure(l.curfn, other.curfn) {
+ return true
+ }
+
+ return false
+}
+
+// containsClosure reports whether c is a closure contained within f.
+func containsClosure(f, c *ir.Func) bool {
+ // Common case.
+ if f == c {
+ return false
+ }
+
+ // Closures within function Foo are named like "Foo.funcN..."
+ // TODO(mdempsky): Better way to recognize this.
+ fn := f.Sym().Name
+ cn := c.Sym().Name
+ return len(cn) > len(fn) && cn[:len(fn)] == fn && cn[len(fn)] == '.'
+}
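Since containsClosure leans entirely on the symbol-name convention mentioned in its TODO, here is a tiny standalone sketch of the string test (the function names used are made up for illustration):

package main

import "fmt"

// containedName mirrors containsClosure's check: a closure generated
// inside Foo is named "Foo.funcN" (nested closures extend the suffix),
// so containment reduces to a prefix-plus-dot test on symbol names.
func containedName(fn, cn string) bool {
	if fn == cn {
		return false
	}
	return len(cn) > len(fn) && cn[:len(fn)] == fn && cn[len(fn)] == '.'
}

func main() {
	fmt.Println(containedName("Foo", "Foo.func1"))    // true
	fmt.Println(containedName("Foo", "Foo.func1.2"))  // true: nested closure
	fmt.Println(containedName("Foo", "Foobar.func1")) // false: different function
}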
diff --git a/src/cmd/compile/internal/escape/stmt.go b/src/cmd/compile/internal/escape/stmt.go
new file mode 100644
index 0000000000..c71848b8a1
--- /dev/null
+++ b/src/cmd/compile/internal/escape/stmt.go
@@ -0,0 +1,207 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package escape
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "fmt"
+)
+
+// stmt evaluates a single Go statement.
+func (e *escape) stmt(n ir.Node) {
+ if n == nil {
+ return
+ }
+
+ lno := ir.SetPos(n)
+ defer func() {
+ base.Pos = lno
+ }()
+
+ if base.Flag.LowerM > 2 {
+ fmt.Printf("%v:[%d] %v stmt: %v\n", base.FmtPos(base.Pos), e.loopDepth, e.curfn, n)
+ }
+
+ e.stmts(n.Init())
+
+ switch n.Op() {
+ default:
+ base.Fatalf("unexpected stmt: %v", n)
+
+ case ir.ODCLCONST, ir.ODCLTYPE, ir.OFALL, ir.OINLMARK:
+ // nop
+
+ case ir.OBREAK, ir.OCONTINUE, ir.OGOTO:
+ // TODO(mdempsky): Handle dead code?
+
+ case ir.OBLOCK:
+ n := n.(*ir.BlockStmt)
+ e.stmts(n.List)
+
+ case ir.ODCL:
+ // Record loop depth at declaration.
+ n := n.(*ir.Decl)
+ if !ir.IsBlank(n.X) {
+ e.dcl(n.X)
+ }
+
+ case ir.OLABEL:
+ n := n.(*ir.LabelStmt)
+ switch e.labels[n.Label] {
+ case nonlooping:
+ if base.Flag.LowerM > 2 {
+ fmt.Printf("%v:%v non-looping label\n", base.FmtPos(base.Pos), n)
+ }
+ case looping:
+ if base.Flag.LowerM > 2 {
+ fmt.Printf("%v: %v looping label\n", base.FmtPos(base.Pos), n)
+ }
+ e.loopDepth++
+ default:
+ base.Fatalf("label missing tag")
+ }
+ delete(e.labels, n.Label)
+
+ case ir.OIF:
+ n := n.(*ir.IfStmt)
+ e.discard(n.Cond)
+ e.block(n.Body)
+ e.block(n.Else)
+
+ case ir.OFOR, ir.OFORUNTIL:
+ n := n.(*ir.ForStmt)
+ e.loopDepth++
+ e.discard(n.Cond)
+ e.stmt(n.Post)
+ e.block(n.Body)
+ e.loopDepth--
+
+ case ir.ORANGE:
+ // for Key, Value = range X { Body }
+ n := n.(*ir.RangeStmt)
+
+ // X is evaluated outside the loop.
+ tmp := e.newLoc(nil, false)
+ e.expr(tmp.asHole(), n.X)
+
+ e.loopDepth++
+ ks := e.addrs([]ir.Node{n.Key, n.Value})
+ if n.X.Type().IsArray() {
+ e.flow(ks[1].note(n, "range"), tmp)
+ } else {
+ e.flow(ks[1].deref(n, "range-deref"), tmp)
+ }
+ e.reassigned(ks, n)
+
+ e.block(n.Body)
+ e.loopDepth--
+
+ case ir.OSWITCH:
+ n := n.(*ir.SwitchStmt)
+
+ if guard, ok := n.Tag.(*ir.TypeSwitchGuard); ok {
+ var ks []hole
+ if guard.Tag != nil {
+ for _, cas := range n.Cases {
+ cv := cas.Var
+ k := e.dcl(cv) // type switch variables have no ODCL.
+ if cv.Type().HasPointers() {
+ ks = append(ks, k.dotType(cv.Type(), cas, "switch case"))
+ }
+ }
+ }
+ e.expr(e.teeHole(ks...), n.Tag.(*ir.TypeSwitchGuard).X)
+ } else {
+ e.discard(n.Tag)
+ }
+
+ for _, cas := range n.Cases {
+ e.discards(cas.List)
+ e.block(cas.Body)
+ }
+
+ case ir.OSELECT:
+ n := n.(*ir.SelectStmt)
+ for _, cas := range n.Cases {
+ e.stmt(cas.Comm)
+ e.block(cas.Body)
+ }
+ case ir.ORECV:
+ // TODO(mdempsky): Consider e.discard(n.Left).
+ n := n.(*ir.UnaryExpr)
+ e.exprSkipInit(e.discardHole(), n) // already visited n.Ninit
+ case ir.OSEND:
+ n := n.(*ir.SendStmt)
+ e.discard(n.Chan)
+ e.assignHeap(n.Value, "send", n)
+
+ case ir.OAS:
+ n := n.(*ir.AssignStmt)
+ e.assignList([]ir.Node{n.X}, []ir.Node{n.Y}, "assign", n)
+ case ir.OASOP:
+ n := n.(*ir.AssignOpStmt)
+ // TODO(mdempsky): Worry about OLSH/ORSH?
+ e.assignList([]ir.Node{n.X}, []ir.Node{n.Y}, "assign", n)
+ case ir.OAS2:
+ n := n.(*ir.AssignListStmt)
+ e.assignList(n.Lhs, n.Rhs, "assign-pair", n)
+
+ case ir.OAS2DOTTYPE: // v, ok = x.(type)
+ n := n.(*ir.AssignListStmt)
+ e.assignList(n.Lhs, n.Rhs, "assign-pair-dot-type", n)
+ case ir.OAS2MAPR: // v, ok = m[k]
+ n := n.(*ir.AssignListStmt)
+ e.assignList(n.Lhs, n.Rhs, "assign-pair-mapr", n)
+ case ir.OAS2RECV, ir.OSELRECV2: // v, ok = <-ch
+ n := n.(*ir.AssignListStmt)
+ e.assignList(n.Lhs, n.Rhs, "assign-pair-receive", n)
+
+ case ir.OAS2FUNC:
+ n := n.(*ir.AssignListStmt)
+ e.stmts(n.Rhs[0].Init())
+ ks := e.addrs(n.Lhs)
+ e.call(ks, n.Rhs[0])
+ e.reassigned(ks, n)
+ case ir.ORETURN:
+ n := n.(*ir.ReturnStmt)
+ results := e.curfn.Type().Results().FieldSlice()
+ dsts := make([]ir.Node, len(results))
+ for i, res := range results {
+ dsts[i] = res.Nname.(*ir.Name)
+ }
+ e.assignList(dsts, n.Results, "return", n)
+ case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER, ir.OINLCALL, ir.OCLOSE, ir.OCOPY, ir.ODELETE, ir.OPANIC, ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
+ e.call(nil, n)
+ case ir.OGO, ir.ODEFER:
+ n := n.(*ir.GoDeferStmt)
+ e.goDeferStmt(n)
+
+ case ir.OTAILCALL:
+ // TODO(mdempsky): Treat like a normal call? esc.go used to just ignore it.
+ }
+}
+
+func (e *escape) stmts(l ir.Nodes) {
+ for _, n := range l {
+ e.stmt(n)
+ }
+}
+
+// block is like stmts, but preserves loopDepth.
+func (e *escape) block(l ir.Nodes) {
+ old := e.loopDepth
+ e.stmts(l)
+ e.loopDepth = old
+}
+
+func (e *escape) dcl(n *ir.Name) hole {
+ if n.Curfn != e.curfn || n.IsClosureVar() {
+ base.Fatalf("bad declaration of %v", n)
+ }
+ loc := e.oldLoc(n)
+ loc.loopDepth = e.loopDepth
+ return loc.asHole()
+}
diff --git a/src/cmd/compile/internal/escape/utils.go b/src/cmd/compile/internal/escape/utils.go
new file mode 100644
index 0000000000..5f462ef570
--- /dev/null
+++ b/src/cmd/compile/internal/escape/utils.go
@@ -0,0 +1,215 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package escape
+
+import (
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
+)
+
+func isSliceSelfAssign(dst, src ir.Node) bool {
+ // Detect the following special case.
+ //
+ // func (b *Buffer) Foo() {
+ // n, m := ...
+ // b.buf = b.buf[n:m]
+ // }
+ //
+ // This assignment is a no-op for escape analysis:
+ // it does not store any new pointers into b that were not already there.
+ // However, without this special case b will escape, because we assign to OIND/ODOTPTR.
+ // Here we assume that the statement will not contain calls,
+ // that is, that order will move any calls to init.
+ // Otherwise base ONAME value could change between the moments
+ // when we evaluate it for dst and for src.
+
+ // dst is ONAME dereference.
+ var dstX ir.Node
+ switch dst.Op() {
+ default:
+ return false
+ case ir.ODEREF:
+ dst := dst.(*ir.StarExpr)
+ dstX = dst.X
+ case ir.ODOTPTR:
+ dst := dst.(*ir.SelectorExpr)
+ dstX = dst.X
+ }
+ if dstX.Op() != ir.ONAME {
+ return false
+ }
+ // src is a slice operation.
+ switch src.Op() {
+ case ir.OSLICE, ir.OSLICE3, ir.OSLICESTR:
+ // OK.
+ case ir.OSLICEARR, ir.OSLICE3ARR:
+ // Since arrays are embedded into containing object,
+ // slice of non-pointer array will introduce a new pointer into b that was not already there
+ // (pointer to b itself). After such assignment, if b contents escape,
+ // b escapes as well. If we ignore such OSLICEARR, we will conclude
+ // that b does not escape when b contents do.
+ //
+ // Pointer to an array is OK since it's not stored inside b directly.
+ // For slicing an array (not pointer to array), there is an implicit OADDR.
+ // We check that to determine non-pointer array slicing.
+ src := src.(*ir.SliceExpr)
+ if src.X.Op() == ir.OADDR {
+ return false
+ }
+ default:
+ return false
+ }
+ // slice is applied to ONAME dereference.
+ var baseX ir.Node
+ switch base := src.(*ir.SliceExpr).X; base.Op() {
+ default:
+ return false
+ case ir.ODEREF:
+ base := base.(*ir.StarExpr)
+ baseX = base.X
+ case ir.ODOTPTR:
+ base := base.(*ir.SelectorExpr)
+ baseX = base.X
+ }
+ if baseX.Op() != ir.ONAME {
+ return false
+ }
+ // dst and src reference the same base ONAME.
+ return dstX.(*ir.Name) == baseX.(*ir.Name)
+}
+
+// isSelfAssign reports whether assignment from src to dst can
+// be ignored by the escape analysis as it's effectively a self-assignment.
+func isSelfAssign(dst, src ir.Node) bool {
+ if isSliceSelfAssign(dst, src) {
+ return true
+ }
+
+ // Detect trivial assignments that assign back to the same object.
+ //
+ // It covers these cases:
+ // val.x = val.y
+ // val.x[i] = val.y[j]
+ // val.x1.x2 = val.x1.y2
+ // ... etc
+ //
+ // These assignments do not change assigned object lifetime.
+
+ if dst == nil || src == nil || dst.Op() != src.Op() {
+ return false
+ }
+
+ // The expression prefix must be both "safe" and identical.
+ switch dst.Op() {
+ case ir.ODOT, ir.ODOTPTR:
+ // Safe trailing accessors that are permitted to differ.
+ dst := dst.(*ir.SelectorExpr)
+ src := src.(*ir.SelectorExpr)
+ return ir.SameSafeExpr(dst.X, src.X)
+ case ir.OINDEX:
+ dst := dst.(*ir.IndexExpr)
+ src := src.(*ir.IndexExpr)
+ if mayAffectMemory(dst.Index) || mayAffectMemory(src.Index) {
+ return false
+ }
+ return ir.SameSafeExpr(dst.X, src.X)
+ default:
+ return false
+ }
+}
+
+// mayAffectMemory reports whether evaluation of n may affect the program's
+// memory state. If the expression can't affect memory state, then it can be
+// safely ignored by the escape analysis.
+func mayAffectMemory(n ir.Node) bool {
+ // We may want to use a list of "memory safe" ops instead of generally
+ // "side-effect free", which would include all calls and other ops that can
+ // allocate or change global state. For now, it's safer to start with the latter.
+ //
+ // We're ignoring things like division by zero, index out of range,
+ // and nil pointer dereference here.
+
+ // TODO(rsc): It seems like it should be possible to replace this with
+ // an ir.Any looking for any op that's not the ones in the case statement.
+ // But that produces changes in the compiled output detected by buildall.
+ switch n.Op() {
+ case ir.ONAME, ir.OLITERAL, ir.ONIL:
+ return false
+
+ case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.OLSH, ir.ORSH, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OMOD:
+ n := n.(*ir.BinaryExpr)
+ return mayAffectMemory(n.X) || mayAffectMemory(n.Y)
+
+ case ir.OINDEX:
+ n := n.(*ir.IndexExpr)
+ return mayAffectMemory(n.X) || mayAffectMemory(n.Index)
+
+ case ir.OCONVNOP, ir.OCONV:
+ n := n.(*ir.ConvExpr)
+ return mayAffectMemory(n.X)
+
+ case ir.OLEN, ir.OCAP, ir.ONOT, ir.OBITNOT, ir.OPLUS, ir.ONEG, ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
+ n := n.(*ir.UnaryExpr)
+ return mayAffectMemory(n.X)
+
+ case ir.ODOT, ir.ODOTPTR:
+ n := n.(*ir.SelectorExpr)
+ return mayAffectMemory(n.X)
+
+ case ir.ODEREF:
+ n := n.(*ir.StarExpr)
+ return mayAffectMemory(n.X)
+
+ default:
+ return true
+ }
+}
+
+// HeapAllocReason returns the reason the given Node must be heap
+// allocated, or the empty string if it does not need to be.
+func HeapAllocReason(n ir.Node) string {
+ if n == nil || n.Type() == nil {
+ return ""
+ }
+
+ // Parameters are always passed via the stack.
+ if n.Op() == ir.ONAME {
+ n := n.(*ir.Name)
+ if n.Class == ir.PPARAM || n.Class == ir.PPARAMOUT {
+ return ""
+ }
+ }
+
+ if n.Type().Width > ir.MaxStackVarSize {
+ return "too large for stack"
+ }
+
+ if (n.Op() == ir.ONEW || n.Op() == ir.OPTRLIT) && n.Type().Elem().Width > ir.MaxImplicitStackVarSize {
+ return "too large for stack"
+ }
+
+ if n.Op() == ir.OCLOSURE && typecheck.ClosureType(n.(*ir.ClosureExpr)).Size() > ir.MaxImplicitStackVarSize {
+ return "too large for stack"
+ }
+ if n.Op() == ir.OMETHVALUE && typecheck.MethodValueType(n.(*ir.SelectorExpr)).Size() > ir.MaxImplicitStackVarSize {
+ return "too large for stack"
+ }
+
+ if n.Op() == ir.OMAKESLICE {
+ n := n.(*ir.MakeExpr)
+ r := n.Cap
+ if r == nil {
+ r = n.Len
+ }
+ if !ir.IsSmallIntConst(r) {
+ return "non-constant size"
+ }
+ if t := n.Type(); t.Elem().Width != 0 && ir.Int64Val(r) > ir.MaxImplicitStackVarSize/t.Elem().Width {
+ return "too large for stack"
+ }
+ }
+
+ return ""
+}
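For context, the pattern that the new isSliceSelfAssign/isSelfAssign checks are designed to recognize corresponds to user code like the following minimal sketch; the Buffer type and Shift method are illustrative names only, not taken from this CL.

package main

import "fmt"

type Buffer struct {
	buf []byte
}

// Shift re-slices the buffer in place. The assignment b.buf = b.buf[n:m]
// stores no pointer into *b that was not already there, so escape analysis
// may treat it as a self-assignment and avoid forcing b to the heap.
func (b *Buffer) Shift(n, m int) {
	b.buf = b.buf[n:m]
}

func main() {
	b := Buffer{buf: []byte("hello world")}
	b.Shift(6, 11)
	fmt.Println(string(b.buf)) // world
}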
diff --git a/src/cmd/compile/internal/gc/export.go b/src/cmd/compile/internal/gc/export.go
index 2137f1d196..9bf3c7240a 100644
--- a/src/cmd/compile/internal/gc/export.go
+++ b/src/cmd/compile/internal/gc/export.go
@@ -5,46 +5,16 @@
package gc
import (
+ "fmt"
+ "go/constant"
+
"cmd/compile/internal/base"
- "cmd/compile/internal/inline"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/bio"
- "fmt"
- "go/constant"
)
-func exportf(bout *bio.Writer, format string, args ...interface{}) {
- fmt.Fprintf(bout, format, args...)
- if base.Debug.Export != 0 {
- fmt.Printf(format, args...)
- }
-}
-
-func dumpexport(bout *bio.Writer) {
- p := &exporter{marked: make(map[*types.Type]bool)}
- for _, n := range typecheck.Target.Exports {
- // Must catch it here rather than Export(), because the type can be
- // not fully set (still TFORW) when Export() is called.
- if n.Type() != nil && n.Type().HasTParam() {
- base.Fatalf("Cannot (yet) export a generic type: %v", n)
- }
- p.markObject(n)
- }
-
- // The linker also looks for the $$ marker - use char after $$ to distinguish format.
- exportf(bout, "\n$$B\n") // indicate binary export format
- off := bout.Offset()
- typecheck.WriteExports(bout.Writer)
- size := bout.Offset() - off
- exportf(bout, "\n$$\n")
-
- if base.Debug.Export != 0 {
- fmt.Printf("BenchmarkExportSize:%s 1 %d bytes\n", base.Ctxt.Pkgpath, size)
- }
-}
-
func dumpasmhdr() {
b, err := bio.Create(base.Flag.AsmHdr)
if err != nil {
@@ -79,83 +49,3 @@ func dumpasmhdr() {
b.Close()
}
-
-type exporter struct {
- marked map[*types.Type]bool // types already seen by markType
-}
-
-// markObject visits a reachable object.
-func (p *exporter) markObject(n ir.Node) {
- if n.Op() == ir.ONAME {
- n := n.(*ir.Name)
- if n.Class == ir.PFUNC {
- inline.Inline_Flood(n, typecheck.Export)
- }
- }
-
- p.markType(n.Type())
-}
-
-// markType recursively visits types reachable from t to identify
-// functions whose inline bodies may be needed.
-func (p *exporter) markType(t *types.Type) {
- if p.marked[t] {
- return
- }
- p.marked[t] = true
-
- // If this is a named type, mark all of its associated
- // methods. Skip interface types because t.Methods contains
- // only their unexpanded method set (i.e., exclusive of
- // interface embeddings), and the switch statement below
- // handles their full method set.
- if t.Sym() != nil && t.Kind() != types.TINTER {
- for _, m := range t.Methods().Slice() {
- if types.IsExported(m.Sym.Name) {
- p.markObject(ir.AsNode(m.Nname))
- }
- }
- }
-
- // Recursively mark any types that can be produced given a
- // value of type t: dereferencing a pointer; indexing or
- // iterating over an array, slice, or map; receiving from a
- // channel; accessing a struct field or interface method; or
- // calling a function.
- //
- // Notably, we don't mark function parameter types, because
- // the user already needs some way to construct values of
- // those types.
- switch t.Kind() {
- case types.TPTR, types.TARRAY, types.TSLICE:
- p.markType(t.Elem())
-
- case types.TCHAN:
- if t.ChanDir().CanRecv() {
- p.markType(t.Elem())
- }
-
- case types.TMAP:
- p.markType(t.Key())
- p.markType(t.Elem())
-
- case types.TSTRUCT:
- for _, f := range t.FieldSlice() {
- if types.IsExported(f.Sym.Name) || f.Embedded != 0 {
- p.markType(f.Type)
- }
- }
-
- case types.TFUNC:
- for _, f := range t.Results().FieldSlice() {
- p.markType(f.Type)
- }
-
- case types.TINTER:
- for _, f := range t.AllMethods().Slice() {
- if types.IsExported(f.Sym.Name) {
- p.markType(f.Type)
- }
- }
- }
-}
diff --git a/src/cmd/compile/internal/gc/main.go b/src/cmd/compile/internal/gc/main.go
index ce50cbb4c2..9660ef9dd5 100644
--- a/src/cmd/compile/internal/gc/main.go
+++ b/src/cmd/compile/internal/gc/main.go
@@ -32,6 +32,7 @@ import (
"log"
"os"
"runtime"
+ "sort"
)
func hidePanic() {
@@ -159,9 +160,6 @@ func Main(archInit func(*ssagen.ArchInfo)) {
dwarf.EnableLogging(base.Debug.DwarfInl != 0)
}
if base.Debug.SoftFloat != 0 {
- if buildcfg.Experiment.RegabiArgs {
- log.Fatalf("softfloat mode with GOEXPERIMENT=regabiargs not implemented ")
- }
ssagen.Arch.SoftFloat = true
}
@@ -181,23 +179,41 @@ func Main(archInit func(*ssagen.ArchInfo)) {
typecheck.Target = new(ir.Package)
- typecheck.NeedITab = func(t, iface *types.Type) { reflectdata.ITabAddr(t, iface) }
typecheck.NeedRuntimeType = reflectdata.NeedRuntimeType // TODO(rsc): TypeSym for lock?
base.AutogeneratedPos = makePos(src.NewFileBase("<autogenerated>", "<autogenerated>"), 1, 0)
typecheck.InitUniverse()
+ typecheck.InitRuntime()
// Parse and typecheck input.
noder.LoadPackage(flag.Args())
dwarfgen.RecordPackageName()
+ // Prepare for backend processing. This must happen before pkginit,
+ // because it generates itabs for initializing global variables.
+ ssagen.InitConfig()
+
// Build init task.
if initTask := pkginit.Task(); initTask != nil {
typecheck.Export(initTask)
}
+ // Stability quirk: sort top-level declarations, so we're not
+ // sensitive to the order that functions are added. In particular,
+ // the order that noder+typecheck add function closures is very
+ // subtle, and not important to reproduce.
+ //
+ // Note: This needs to happen after pkginit.Task, otherwise it risks
+ // changing the order in which top-level variables are initialized.
+ if base.Debug.UnifiedQuirks != 0 {
+ s := typecheck.Target.Decls
+ sort.SliceStable(s, func(i, j int) bool {
+ return s[i].Pos().Before(s[j].Pos())
+ })
+ }
+
// Eliminate some obviously dead code.
// Must happen after typechecking.
for _, n := range typecheck.Target.Decls {
@@ -252,6 +268,11 @@ func Main(archInit func(*ssagen.ArchInfo)) {
base.Timer.Start("fe", "escapes")
escape.Funcs(typecheck.Target.Decls)
+ // TODO(mdempsky): This is a hack. We need a proper, global work
+ // queue for scheduling function compilation so components don't
+ // need to adjust their behavior depending on when they're called.
+ reflectdata.AfterGlobalEscapeAnalysis = true
+
// Collect information for go:nowritebarrierrec
// checking. This must happen before transforming closures during Walk
// We'll do the final check after write barriers are
@@ -260,17 +281,7 @@ func Main(archInit func(*ssagen.ArchInfo)) {
ssagen.EnableNoWriteBarrierRecCheck()
}
- // Prepare for SSA compilation.
- // This must be before CompileITabs, because CompileITabs
- // can trigger function compilation.
- typecheck.InitRuntime()
- ssagen.InitConfig()
-
- // Just before compilation, compile itabs found on
- // the right side of OCONVIFACE so that methods
- // can be de-virtualized during compilation.
ir.CurFunc = nil
- reflectdata.CompileITabs()
// Compile top level functions.
// Don't use range--walk can add functions to Target.Decls.
@@ -278,6 +289,10 @@ func Main(archInit func(*ssagen.ArchInfo)) {
fcount := int64(0)
for i := 0; i < len(typecheck.Target.Decls); i++ {
if fn, ok := typecheck.Target.Decls[i].(*ir.Func); ok {
+ // Don't try compiling a dead hidden closure.
+ if fn.IsDeadcodeClosure() {
+ continue
+ }
enqueueFunc(fn)
fcount++
}
diff --git a/src/cmd/compile/internal/gc/obj.go b/src/cmd/compile/internal/gc/obj.go
index 474d718525..c86bf5f084 100644
--- a/src/cmd/compile/internal/gc/obj.go
+++ b/src/cmd/compile/internal/gc/obj.go
@@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/noder"
"cmd/compile/internal/objw"
"cmd/compile/internal/reflectdata"
"cmd/compile/internal/staticdata"
@@ -103,7 +104,7 @@ func finishArchiveEntry(bout *bio.Writer, start int64, name string) {
func dumpCompilerObj(bout *bio.Writer) {
printObjHeader(bout)
- dumpexport(bout)
+ noder.WriteExports(bout)
}
func dumpdata() {
@@ -116,7 +117,7 @@ func dumpdata() {
addsignats(typecheck.Target.Externs)
reflectdata.WriteRuntimeTypes()
reflectdata.WriteTabs()
- numPTabs, numITabs := reflectdata.CountTabs()
+ numPTabs := reflectdata.CountPTabs()
reflectdata.WriteImportStrings()
reflectdata.WriteBasicTypes()
dumpembeds()
@@ -157,13 +158,10 @@ func dumpdata() {
if numExports != len(typecheck.Target.Exports) {
base.Fatalf("Target.Exports changed after compile functions loop")
}
- newNumPTabs, newNumITabs := reflectdata.CountTabs()
+ newNumPTabs := reflectdata.CountPTabs()
if newNumPTabs != numPTabs {
base.Fatalf("ptabs changed after compile functions loop")
}
- if newNumITabs != numITabs {
- base.Fatalf("itabs changed after compile functions loop")
- }
}
func dumpLinkerObj(bout *bio.Writer) {
diff --git a/src/cmd/compile/internal/importer/exportdata.go b/src/cmd/compile/internal/importer/exportdata.go
index 3925a64314..6a672be9c1 100644
--- a/src/cmd/compile/internal/importer/exportdata.go
+++ b/src/cmd/compile/internal/importer/exportdata.go
@@ -1,4 +1,3 @@
-// UNREVIEWED
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
diff --git a/src/cmd/compile/internal/importer/gcimporter.go b/src/cmd/compile/internal/importer/gcimporter.go
index feb18cf2c9..ff40be65bb 100644
--- a/src/cmd/compile/internal/importer/gcimporter.go
+++ b/src/cmd/compile/internal/importer/gcimporter.go
@@ -1,4 +1,3 @@
-// UNREVIEWED
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
@@ -156,7 +155,7 @@ func Import(packages map[string]*types2.Package, path, srcDir string, lookup fun
// binary export format starts with a 'c', 'd', or 'v'
// (from "version"). Select appropriate importer.
if len(data) > 0 && data[0] == 'i' {
- _, pkg, err = iImportData(packages, data[1:], id)
+ pkg, err = ImportData(packages, string(data[1:]), id)
} else {
err = fmt.Errorf("import %q: old binary export format no longer supported (recompile library)", path)
}
diff --git a/src/cmd/compile/internal/importer/gcimporter_test.go b/src/cmd/compile/internal/importer/gcimporter_test.go
index 7fb8fed59c..44c5e06cd6 100644
--- a/src/cmd/compile/internal/importer/gcimporter_test.go
+++ b/src/cmd/compile/internal/importer/gcimporter_test.go
@@ -1,4 +1,3 @@
-// UNREVIEWED
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
@@ -10,7 +9,6 @@ import (
"cmd/compile/internal/types2"
"fmt"
"internal/testenv"
- "io/ioutil"
"os"
"os/exec"
"path/filepath"
@@ -64,7 +62,7 @@ const maxTime = 30 * time.Second
func testDir(t *testing.T, dir string, endTime time.Time) (nimports int) {
dirname := filepath.Join(runtime.GOROOT(), "pkg", runtime.GOOS+"_"+runtime.GOARCH, dir)
- list, err := ioutil.ReadDir(dirname)
+ list, err := os.ReadDir(dirname)
if err != nil {
t.Fatalf("testDir(%s): %s", dirname, err)
}
@@ -92,7 +90,7 @@ func testDir(t *testing.T, dir string, endTime time.Time) (nimports int) {
}
func mktmpdir(t *testing.T) string {
- tmpdir, err := ioutil.TempDir("", "gcimporter_test")
+ tmpdir, err := os.MkdirTemp("", "gcimporter_test")
if err != nil {
t.Fatal("mktmpdir:", err)
}
@@ -142,7 +140,7 @@ func TestVersionHandling(t *testing.T) {
}
const dir = "./testdata/versions"
- list, err := ioutil.ReadDir(dir)
+ list, err := os.ReadDir(dir)
if err != nil {
t.Fatal(err)
}
@@ -195,7 +193,7 @@ func TestVersionHandling(t *testing.T) {
// create file with corrupted export data
// 1) read file
- data, err := ioutil.ReadFile(filepath.Join(dir, name))
+ data, err := os.ReadFile(filepath.Join(dir, name))
if err != nil {
t.Fatal(err)
}
@@ -212,7 +210,7 @@ func TestVersionHandling(t *testing.T) {
// 4) write the file
pkgpath += "_corrupted"
filename := filepath.Join(corruptdir, pkgpath) + ".a"
- ioutil.WriteFile(filename, data, 0666)
+ os.WriteFile(filename, data, 0666)
// test that importing the corrupted file results in an error
_, err = Import(make(map[string]*types2.Package), pkgpath, corruptdir, nil)
@@ -261,8 +259,7 @@ var importedObjectTests = []struct {
{"io.Reader", "type Reader interface{Read(p []byte) (n int, err error)}"},
{"io.ReadWriter", "type ReadWriter interface{Reader; Writer}"},
{"go/ast.Node", "type Node interface{End() go/token.Pos; Pos() go/token.Pos}"},
- // go/types.Type has grown much larger - excluded for now
- // {"go/types.Type", "type Type interface{String() string; Underlying() Type}"},
+ {"go/types.Type", "type Type interface{String() string; Underlying() Type}"},
}
func TestImportedTypes(t *testing.T) {
@@ -457,17 +454,17 @@ func TestIssue13898(t *testing.T) {
t.Fatal("go/types not found")
}
- // look for go/types2.Object type
+ // look for go/types.Object type
obj := lookupObj(t, goTypesPkg.Scope(), "Object")
typ, ok := obj.Type().(*types2.Named)
if !ok {
- t.Fatalf("go/types2.Object type is %v; wanted named type", typ)
+ t.Fatalf("go/types.Object type is %v; wanted named type", typ)
}
- // lookup go/types2.Object.Pkg method
+ // lookup go/types.Object.Pkg method
m, index, indirect := types2.LookupFieldOrMethod(typ, false, nil, "Pkg")
if m == nil {
- t.Fatalf("go/types2.Object.Pkg not found (index = %v, indirect = %v)", index, indirect)
+ t.Fatalf("go/types.Object.Pkg not found (index = %v, indirect = %v)", index, indirect)
}
// the method must belong to go/types
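The test updates above swap the io/ioutil helpers for their os equivalents introduced in Go 1.16. The sketch below only illustrates that standard-library mapping (it is not part of the CL); note that os.ReadDir returns []os.DirEntry rather than the []os.FileInfo that ioutil.ReadDir produced.

package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
)

func main() {
	// ioutil.ReadDir -> os.ReadDir (returns lightweight os.DirEntry values)
	entries, err := os.ReadDir(".")
	if err != nil {
		log.Fatal(err)
	}
	for _, e := range entries {
		fmt.Println(e.Name(), e.IsDir())
	}

	// ioutil.TempDir -> os.MkdirTemp
	tmp, err := os.MkdirTemp("", "gcimporter_sketch")
	if err != nil {
		log.Fatal(err)
	}
	defer os.RemoveAll(tmp)

	// ioutil.WriteFile / ioutil.ReadFile -> os.WriteFile / os.ReadFile
	name := filepath.Join(tmp, "f.txt")
	if err := os.WriteFile(name, []byte("data"), 0666); err != nil {
		log.Fatal(err)
	}
	data, err := os.ReadFile(name)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(data))
}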
diff --git a/src/cmd/compile/internal/importer/iimport.go b/src/cmd/compile/internal/importer/iimport.go
index 8ab0b7b989..c303126ea6 100644
--- a/src/cmd/compile/internal/importer/iimport.go
+++ b/src/cmd/compile/internal/importer/iimport.go
@@ -1,4 +1,3 @@
-// UNREVIEWED
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
@@ -9,7 +8,6 @@
package importer
import (
- "bytes"
"cmd/compile/internal/syntax"
"cmd/compile/internal/types2"
"encoding/binary"
@@ -19,10 +17,11 @@ import (
"io"
"math/big"
"sort"
+ "strings"
)
type intReader struct {
- *bytes.Reader
+ *strings.Reader
path string
}
@@ -42,6 +41,21 @@ func (r *intReader) uint64() uint64 {
return i
}
+// Keep this in sync with constants in iexport.go.
+const (
+ iexportVersionGo1_11 = 0
+ iexportVersionPosCol = 1
+ // TODO: before release, change this back to 2.
+ iexportVersionGenerics = iexportVersionPosCol
+
+ iexportVersionCurrent = iexportVersionGenerics
+)
+
+type ident struct {
+ pkg string
+ name string
+}
+
const predeclReserved = 32
type itag uint64
@@ -57,6 +71,9 @@ const (
signatureType
structType
interfaceType
+ typeParamType
+ instType
+ unionType
)
const io_SeekCurrent = 1 // io.SeekCurrent (not defined in Go 1.4)
@@ -65,8 +82,8 @@ const io_SeekCurrent = 1 // io.SeekCurrent (not defined in Go 1.4)
// and returns the number of bytes consumed and a reference to the package.
// If the export data version is not recognized or the format is otherwise
// compromised, an error is returned.
-func iImportData(imports map[string]*types2.Package, data []byte, path string) (_ int, pkg *types2.Package, err error) {
- const currentVersion = 1
+func ImportData(imports map[string]*types2.Package, data, path string) (pkg *types2.Package, err error) {
+ const currentVersion = iexportVersionCurrent
version := int64(-1)
defer func() {
if e := recover(); e != nil {
@@ -78,13 +95,17 @@ func iImportData(imports map[string]*types2.Package, data []byte, path string) (
}
}()
- r := &intReader{bytes.NewReader(data), path}
+ r := &intReader{strings.NewReader(data), path}
version = int64(r.uint64())
switch version {
- case currentVersion, 0:
+ case /* iexportVersionGenerics, */ iexportVersionPosCol, iexportVersionGo1_11:
default:
- errorf("unknown iexport format version %d", version)
+ if version > iexportVersionGenerics {
+ errorf("unstable iexport format version %d, just rebuild compiler and std library", version)
+ } else {
+ errorf("unknown iexport format version %d", version)
+ }
}
sLen := int64(r.uint64())
@@ -96,16 +117,20 @@ func iImportData(imports map[string]*types2.Package, data []byte, path string) (
r.Seek(sLen+dLen, io_SeekCurrent)
p := iimporter{
- ipath: path,
- version: int(version),
+ exportVersion: version,
+ ipath: path,
+ version: int(version),
- stringData: stringData,
- stringCache: make(map[uint64]string),
- pkgCache: make(map[uint64]*types2.Package),
+ stringData: stringData,
+ pkgCache: make(map[uint64]*types2.Package),
+ posBaseCache: make(map[uint64]*syntax.PosBase),
declData: declData,
pkgIndex: make(map[*types2.Package]map[string]uint64),
typCache: make(map[uint64]types2.Type),
+ // Separate map for typeparams, keyed by their package and unique
+ // name (name with subscript).
+ tparamIndex: make(map[ident]types2.Type),
}
for i, pt := range predeclared {
@@ -117,17 +142,22 @@ func iImportData(imports map[string]*types2.Package, data []byte, path string) (
pkgPathOff := r.uint64()
pkgPath := p.stringAt(pkgPathOff)
pkgName := p.stringAt(r.uint64())
- _ = r.uint64() // package height; unused by go/types
+ pkgHeight := int(r.uint64())
if pkgPath == "" {
pkgPath = path
}
pkg := imports[pkgPath]
if pkg == nil {
- pkg = types2.NewPackage(pkgPath, pkgName)
+ pkg = types2.NewPackageHeight(pkgPath, pkgName, pkgHeight)
imports[pkgPath] = pkg
- } else if pkg.Name() != pkgName {
- errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path)
+ } else {
+ if pkg.Name() != pkgName {
+ errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path)
+ }
+ if pkg.Height() != pkgHeight {
+ errorf("conflicting heights %v and %v for package %q", pkg.Height(), pkgHeight, path)
+ }
}
p.pkgCache[pkgPathOff] = pkg
@@ -153,10 +183,6 @@ func iImportData(imports map[string]*types2.Package, data []byte, path string) (
p.doDecl(localpkg, name)
}
- for _, typ := range p.interfaceList {
- typ.Complete()
- }
-
// record all referenced packages as imports
list := append(([]*types2.Package)(nil), pkgList[1:]...)
sort.Sort(byPath(list))
@@ -165,21 +191,22 @@ func iImportData(imports map[string]*types2.Package, data []byte, path string) (
// package was imported completely and without errors
localpkg.MarkComplete()
- consumed, _ := r.Seek(0, io_SeekCurrent)
- return int(consumed), localpkg, nil
+ return localpkg, nil
}
type iimporter struct {
- ipath string
- version int
+ exportVersion int64
+ ipath string
+ version int
- stringData []byte
- stringCache map[uint64]string
- pkgCache map[uint64]*types2.Package
+ stringData string
+ pkgCache map[uint64]*types2.Package
+ posBaseCache map[uint64]*syntax.PosBase
- declData []byte
- pkgIndex map[*types2.Package]map[string]uint64
- typCache map[uint64]types2.Type
+ declData string
+ pkgIndex map[*types2.Package]map[string]uint64
+ typCache map[uint64]types2.Type
+ tparamIndex map[ident]types2.Type
interfaceList []*types2.Interface
}
@@ -199,24 +226,21 @@ func (p *iimporter) doDecl(pkg *types2.Package, name string) {
// Reader.Reset is not available in Go 1.4.
// Use bytes.NewReader for now.
// r.declReader.Reset(p.declData[off:])
- r.declReader = *bytes.NewReader(p.declData[off:])
+ r.declReader = *strings.NewReader(p.declData[off:])
r.obj(name)
}
func (p *iimporter) stringAt(off uint64) string {
- if s, ok := p.stringCache[off]; ok {
- return s
- }
+ var x [binary.MaxVarintLen64]byte
+ n := copy(x[:], p.stringData[off:])
- slen, n := binary.Uvarint(p.stringData[off:])
+ slen, n := binary.Uvarint(x[:n])
if n <= 0 {
errorf("varint failed")
}
spos := off + uint64(n)
- s := string(p.stringData[spos : spos+slen])
- p.stringCache[off] = s
- return s
+ return p.stringData[spos : spos+slen]
}
func (p *iimporter) pkgAt(off uint64) *types2.Package {
@@ -228,6 +252,16 @@ func (p *iimporter) pkgAt(off uint64) *types2.Package {
return nil
}
+func (p *iimporter) posBaseAt(off uint64) *syntax.PosBase {
+ if posBase, ok := p.posBaseCache[off]; ok {
+ return posBase
+ }
+ filename := p.stringAt(off)
+ posBase := syntax.NewTrimmedFileBase(filename, true)
+ p.posBaseCache[off] = posBase
+ return posBase
+}
+
func (p *iimporter) typAt(off uint64, base *types2.Named) types2.Type {
if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) {
return t
@@ -241,7 +275,7 @@ func (p *iimporter) typAt(off uint64, base *types2.Named) types2.Type {
// Reader.Reset is not available in Go 1.4.
// Use bytes.NewReader for now.
// r.declReader.Reset(p.declData[off-predeclReserved:])
- r.declReader = *bytes.NewReader(p.declData[off-predeclReserved:])
+ r.declReader = *strings.NewReader(p.declData[off-predeclReserved:])
t := r.doType(base)
if base == nil || !isInterface(t) {
@@ -251,12 +285,12 @@ func (p *iimporter) typAt(off uint64, base *types2.Named) types2.Type {
}
type importReader struct {
- p *iimporter
- declReader bytes.Reader
- currPkg *types2.Package
- prevFile string
- prevLine int64
- prevColumn int64
+ p *iimporter
+ declReader strings.Reader
+ currPkg *types2.Package
+ prevPosBase *syntax.PosBase
+ prevLine int64
+ prevColumn int64
}
func (r *importReader) obj(name string) {
@@ -274,16 +308,28 @@ func (r *importReader) obj(name string) {
r.declare(types2.NewConst(pos, r.currPkg, name, typ, val))
- case 'F':
+ case 'F', 'G':
+ var tparams []*types2.TypeParam
+ if tag == 'G' {
+ tparams = r.tparamList()
+ }
sig := r.signature(nil)
-
+ sig.SetTParams(tparams)
r.declare(types2.NewFunc(pos, r.currPkg, name, sig))
- case 'T':
+ case 'T', 'U':
+ var tparams []*types2.TypeParam
+ if tag == 'U' {
+ tparams = r.tparamList()
+ }
+
// Types can be recursive. We need to setup a stub
// declaration before recursing.
obj := types2.NewTypeName(pos, r.currPkg, name, nil)
named := types2.NewNamed(obj, nil, nil)
+ if tag == 'U' {
+ named.SetTParams(tparams)
+ }
r.declare(obj)
underlying := r.p.typAt(r.uint64(), named).Underlying()
@@ -296,10 +342,43 @@ func (r *importReader) obj(name string) {
recv := r.param()
msig := r.signature(recv)
+ // If the receiver has any targs, set those as the
+ // rparams of the method (since those are the
+ // typeparams being used in the method sig/body).
+ targs := baseType(msig.Recv().Type()).TArgs()
+ if targs.Len() > 0 {
+ rparams := make([]*types2.TypeParam, targs.Len())
+ for i := range rparams {
+ rparams[i] = types2.AsTypeParam(targs.At(i))
+ }
+ msig.SetRParams(rparams)
+ }
+
named.AddMethod(types2.NewFunc(mpos, r.currPkg, mname, msig))
}
}
+ case 'P':
+ // We need to "declare" a typeparam in order to have a name that
+ // can be referenced recursively (if needed) in the type param's
+ // bound.
+ if r.p.exportVersion < iexportVersionGenerics {
+ errorf("unexpected type param type")
+ }
+ name0, sub := parseSubscript(name)
+ tn := types2.NewTypeName(pos, r.currPkg, name0, nil)
+ t := (*types2.Checker)(nil).NewTypeParam(tn, nil)
+ if sub == 0 {
+ errorf("missing subscript")
+ }
+ t.SetId(sub)
+ // To handle recursive references to the typeparam within its
+ // bound, save the partial type in tparamIndex before reading the bounds.
+ id := ident{r.currPkg.Name(), name}
+ r.p.tparamIndex[id] = t
+
+ t.SetConstraint(r.typ())
+
case 'V':
typ := r.typ()
@@ -439,12 +518,11 @@ func (r *importReader) pos() syntax.Pos {
r.posv0()
}
- if r.prevFile == "" && r.prevLine == 0 && r.prevColumn == 0 {
+ if (r.prevPosBase == nil || r.prevPosBase.Filename() == "") && r.prevLine == 0 && r.prevColumn == 0 {
return syntax.Pos{}
}
- // TODO(gri) fix this
- // return r.p.fake.pos(r.prevFile, int(r.prevLine), int(r.prevColumn))
- return syntax.Pos{}
+
+ return syntax.MakePos(r.prevPosBase, uint(r.prevLine), uint(r.prevColumn))
}
func (r *importReader) posv0() {
@@ -454,7 +532,7 @@ func (r *importReader) posv0() {
} else if l := r.int64(); l == -1 {
r.prevLine += deltaNewFile
} else {
- r.prevFile = r.string()
+ r.prevPosBase = r.posBase()
r.prevLine = l
}
}
@@ -466,7 +544,7 @@ func (r *importReader) posv1() {
delta = r.int64()
r.prevLine += delta >> 1
if delta&1 != 0 {
- r.prevFile = r.string()
+ r.prevPosBase = r.posBase()
}
}
}
@@ -480,8 +558,9 @@ func isInterface(t types2.Type) bool {
return ok
}
-func (r *importReader) pkg() *types2.Package { return r.p.pkgAt(r.uint64()) }
-func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
+func (r *importReader) pkg() *types2.Package { return r.p.pkgAt(r.uint64()) }
+func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
+func (r *importReader) posBase() *syntax.PosBase { return r.p.posBaseAt(r.uint64()) }
func (r *importReader) doType(base *types2.Named) types2.Type {
switch k := r.kind(); k {
@@ -554,6 +633,49 @@ func (r *importReader) doType(base *types2.Named) types2.Type {
typ := types2.NewInterfaceType(methods, embeddeds)
r.p.interfaceList = append(r.p.interfaceList, typ)
return typ
+
+ case typeParamType:
+ if r.p.exportVersion < iexportVersionGenerics {
+ errorf("unexpected type param type")
+ }
+ pkg, name := r.qualifiedIdent()
+ id := ident{pkg.Name(), name}
+ if t, ok := r.p.tparamIndex[id]; ok {
+ // We're already in the process of importing this typeparam.
+ return t
+ }
+ // Otherwise, import the definition of the typeparam now.
+ r.p.doDecl(pkg, name)
+ return r.p.tparamIndex[id]
+
+ case instType:
+ if r.p.exportVersion < iexportVersionGenerics {
+ errorf("unexpected instantiation type")
+ }
+ // pos does not matter for instances: they are positioned on the original
+ // type.
+ _ = r.pos()
+ len := r.uint64()
+ targs := make([]types2.Type, len)
+ for i := range targs {
+ targs[i] = r.typ()
+ }
+ baseType := r.typ()
+ // The imported instantiated type doesn't include any methods, so
+ // we must always use the methods of the base (orig) type.
+ // TODO provide a non-nil *Checker
+ t, _ := types2.Instantiate(nil, baseType, targs, false)
+ return t
+
+ case unionType:
+ if r.p.exportVersion < iexportVersionGenerics {
+ errorf("unexpected instantiation type")
+ }
+ terms := make([]*types2.Term, r.uint64())
+ for i := range terms {
+ terms[i] = types2.NewTerm(r.bool(), r.typ())
+ }
+ return types2.NewUnion(terms)
}
}
@@ -568,6 +690,19 @@ func (r *importReader) signature(recv *types2.Var) *types2.Signature {
return types2.NewSignature(recv, params, results, variadic)
}
+func (r *importReader) tparamList() []*types2.TypeParam {
+ n := r.uint64()
+ if n == 0 {
+ return nil
+ }
+ xs := make([]*types2.TypeParam, n)
+ for i := range xs {
+ typ := r.typ()
+ xs[i] = types2.AsTypeParam(typ)
+ }
+ return xs
+}
+
func (r *importReader) paramList() *types2.Tuple {
xs := make([]*types2.Var, r.uint64())
for i := range xs {
@@ -610,3 +745,33 @@ func (r *importReader) byte() byte {
}
return x
}
+
+func baseType(typ types2.Type) *types2.Named {
+ // pointer receivers are never types2.Named types
+ if p, _ := typ.(*types2.Pointer); p != nil {
+ typ = p.Elem()
+ }
+ // receiver base types are always (possibly generic) types2.Named types
+ n, _ := typ.(*types2.Named)
+ return n
+}
+
+func parseSubscript(name string) (string, uint64) {
+ // Extract the subscript value from the type param name. We export
+ // and import the subscript value, so that all type params have
+ // unique names.
+ sub := uint64(0)
+ startsub := -1
+ for i, r := range name {
+ if '₀' <= r && r < '₀'+10 {
+ if startsub == -1 {
+ startsub = i
+ }
+ sub = sub*10 + uint64(r-'₀')
+ }
+ }
+ if startsub >= 0 {
+ name = name[:startsub]
+ }
+ return name, sub
+}
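The subscript convention handled by parseSubscript above can be exercised on its own. This standalone sketch copies the same loop and shows how an exported type-parameter name such as "T₁" splits into its base name and unique id; the example names are illustrative.

package main

import "fmt"

func parseSubscript(name string) (string, uint64) {
	sub := uint64(0)
	startsub := -1
	for i, r := range name {
		if '₀' <= r && r < '₀'+10 {
			if startsub == -1 {
				startsub = i
			}
			sub = sub*10 + uint64(r-'₀')
		}
	}
	if startsub >= 0 {
		name = name[:startsub]
	}
	return name, sub
}

func main() {
	name, id := parseSubscript("T₁")
	fmt.Println(name, id) // T 1

	name, id = parseSubscript("Elem₁₂")
	fmt.Println(name, id) // Elem 12

	// A name with no subscript yields id 0, which the importer above
	// reports as a "missing subscript" error.
	name, id = parseSubscript("T")
	fmt.Println(name, id) // T 0
}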
diff --git a/src/cmd/compile/internal/importer/support.go b/src/cmd/compile/internal/importer/support.go
index 40b9c7c958..6ceb413601 100644
--- a/src/cmd/compile/internal/importer/support.go
+++ b/src/cmd/compile/internal/importer/support.go
@@ -1,4 +1,3 @@
-// UNREVIEWED
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
@@ -120,6 +119,9 @@ var predeclared = []types2.Type{
// used internally by gc; never used by this package or in .a files
anyType{},
+
+ // comparable
+ types2.Universe.Lookup("comparable").Type(),
}
type anyType struct{}
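The predeclared list above gains the universe "comparable" type from types2. For readers outside the compiler, the exported go/types package exposes an analogous object in Go releases with type-parameter support (1.18+); this is only a sketch of that lookup, and the printed form may vary by Go version.

package main

import (
	"fmt"
	"go/types"
)

func main() {
	// "comparable" lives in the universe scope, alongside other
	// predeclared identifiers such as "error" and "any".
	obj := types.Universe.Lookup("comparable")
	fmt.Println(obj)
}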
diff --git a/src/cmd/compile/internal/inline/inl.go b/src/cmd/compile/internal/inline/inl.go
index d6b4ced4e1..45a533fcaf 100644
--- a/src/cmd/compile/internal/inline/inl.go
+++ b/src/cmd/compile/internal/inline/inl.go
@@ -179,6 +179,8 @@ func CanInline(fn *ir.Func) {
Cost: inlineMaxBudget - visitor.budget,
Dcl: pruneUnusedAutos(n.Defn.(*ir.Func).Dcl, &visitor),
Body: inlcopylist(fn.Body),
+
+ CanDelayResults: canDelayResults(fn),
}
if base.Flag.LowerM > 1 {
@@ -191,60 +193,36 @@ func CanInline(fn *ir.Func) {
}
}
-// Inline_Flood marks n's inline body for export and recursively ensures
-// all called functions are marked too.
-func Inline_Flood(n *ir.Name, exportsym func(*ir.Name)) {
- if n == nil {
- return
- }
- if n.Op() != ir.ONAME || n.Class != ir.PFUNC {
- base.Fatalf("Inline_Flood: unexpected %v, %v, %v", n, n.Op(), n.Class)
- }
- fn := n.Func
- if fn == nil {
- base.Fatalf("Inline_Flood: missing Func on %v", n)
- }
- if fn.Inl == nil {
- return
- }
-
- if fn.ExportInline() {
- return
- }
- fn.SetExportInline(true)
-
- typecheck.ImportedBody(fn)
-
- var doFlood func(n ir.Node)
- doFlood = func(n ir.Node) {
- switch n.Op() {
- case ir.OMETHEXPR, ir.ODOTMETH:
- Inline_Flood(ir.MethodExprName(n), exportsym)
+// canDelayResults reports whether inlined calls to fn can delay
+// declaring the result parameter until the "return" statement.
+func canDelayResults(fn *ir.Func) bool {
+ // We can delay declaring+initializing result parameters if:
+ // (1) there's exactly one "return" statement in the inlined function;
+ // (2) it's not an empty return statement (#44355); and
+ // (3) the result parameters aren't named.
- case ir.ONAME:
- n := n.(*ir.Name)
- switch n.Class {
- case ir.PFUNC:
- Inline_Flood(n, exportsym)
- exportsym(n)
- case ir.PEXTERN:
- exportsym(n)
+ nreturns := 0
+ ir.VisitList(fn.Body, func(n ir.Node) {
+ if n, ok := n.(*ir.ReturnStmt); ok {
+ nreturns++
+ if len(n.Results) == 0 {
+ nreturns++ // empty return statement (case 2)
}
+ }
+ })
- case ir.OCALLPART:
- // Okay, because we don't yet inline indirect
- // calls to method values.
- case ir.OCLOSURE:
- // VisitList doesn't visit closure bodies, so force a
- // recursive call to VisitList on the body of the closure.
- ir.VisitList(n.(*ir.ClosureExpr).Func.Body, doFlood)
+ if nreturns != 1 {
+ return false // not exactly one return statement (case 1)
+ }
+
+ // The result parameters must not be named (case 3).
+ for _, param := range fn.Type().Results().FieldSlice() {
+ if sym := types.OrigSym(param.Sym); sym != nil && !sym.IsBlank() {
+ return false // found a named result parameter (case 3)
}
}
- // Recursively identify all referenced functions for
- // reexport. We want to include even non-called functions,
- // because after inlining they might be callable.
- ir.VisitList(ir.Nodes(fn.Inl.Body), doFlood)
+ return true
}
// hairyVisitor visits a function body to determine its inlining
@@ -295,6 +273,19 @@ func (v *hairyVisitor) doNode(n ir.Node) bool {
}
}
}
+ if n.X.Op() == ir.OMETHEXPR {
+ if meth := ir.MethodExprName(n.X); meth != nil {
+ fn := meth.Func
+ if fn != nil && types.IsRuntimePkg(fn.Sym().Pkg) && fn.Sym().Name == "heapBits.nextArena" {
+ // Special case: explicitly allow
+ // mid-stack inlining of
+ // runtime.heapBits.next even though
+ // it calls slow-path
+ // runtime.heapBits.nextArena.
+ break
+ }
+ }
+ }
if ir.IsIntrinsicCall(n) {
// Treat like any other node.
@@ -309,28 +300,8 @@ func (v *hairyVisitor) doNode(n ir.Node) bool {
// Call cost for non-leaf inlining.
v.budget -= v.extraCallCost
- // Call is okay if inlinable and we have the budget for the body.
case ir.OCALLMETH:
- n := n.(*ir.CallExpr)
- t := n.X.Type()
- if t == nil {
- base.Fatalf("no function type for [%p] %+v\n", n.X, n.X)
- }
- fn := ir.MethodExprName(n.X).Func
- if types.IsRuntimePkg(fn.Sym().Pkg) && fn.Sym().Name == "heapBits.nextArena" {
- // Special case: explicitly allow
- // mid-stack inlining of
- // runtime.heapBits.next even though
- // it calls slow-path
- // runtime.heapBits.nextArena.
- break
- }
- if fn.Inl != nil {
- v.budget -= fn.Inl.Cost
- break
- }
- // Call cost for non-leaf inlining.
- v.budget -= v.extraCallCost
+ base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
// Things that are too hairy, irrespective of the budget
case ir.OCALL, ir.OCALLINTER:
@@ -445,7 +416,7 @@ func (v *hairyVisitor) doNode(n ir.Node) bool {
// and don't charge for the OBLOCK itself. The ++ undoes the -- below.
v.budget++
- case ir.OCALLPART, ir.OSLICELIT:
+ case ir.OMETHVALUE, ir.OSLICELIT:
v.budget-- // Hack for toolstash -cmp.
case ir.OMETHEXPR:
@@ -499,9 +470,6 @@ func inlcopy(n ir.Node) ir.Node {
// x.Func.Body for iexport and local inlining.
oldfn := x.Func
newfn := ir.NewFunc(oldfn.Pos())
- if oldfn.ClosureCalled() {
- newfn.SetClosureCalled(true)
- }
m.(*ir.ClosureExpr).Func = newfn
newfn.Nname = ir.NewNameAt(oldfn.Nname.Pos(), oldfn.Nname.Sym())
// XXX OK to share fn.Type() ??
@@ -544,37 +512,6 @@ func InlineCalls(fn *ir.Func) {
ir.CurFunc = savefn
}
-// Turn an OINLCALL into a statement.
-func inlconv2stmt(inlcall *ir.InlinedCallExpr) ir.Node {
- n := ir.NewBlockStmt(inlcall.Pos(), nil)
- n.List = inlcall.Init()
- n.List.Append(inlcall.Body.Take()...)
- return n
-}
-
-// Turn an OINLCALL into a single valued expression.
-// The result of inlconv2expr MUST be assigned back to n, e.g.
-// n.Left = inlconv2expr(n.Left)
-func inlconv2expr(n *ir.InlinedCallExpr) ir.Node {
- r := n.ReturnVars[0]
- return ir.InitExpr(append(n.Init(), n.Body...), r)
-}
-
-// Turn the rlist (with the return values) of the OINLCALL in
-// n into an expression list lumping the ninit and body
-// containing the inlined statements on the first list element so
-// order will be preserved. Used in return, oas2func and call
-// statements.
-func inlconv2list(n *ir.InlinedCallExpr) []ir.Node {
- if n.Op() != ir.OINLCALL || len(n.ReturnVars) == 0 {
- base.Fatalf("inlconv2list %+v\n", n)
- }
-
- s := n.ReturnVars
- s[0] = ir.InitExpr(append(n.Init(), n.Body...), s[0])
- return s
-}
-
// inlnode recurses over the tree to find inlineable calls, which will
// be turned into OINLCALLs by mkinlcall. When the recursion comes
// back up will examine left, right, list, rlist, ninit, ntest, nincr,
@@ -597,7 +534,9 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
case ir.ODEFER, ir.OGO:
n := n.(*ir.GoDeferStmt)
switch call := n.Call; call.Op() {
- case ir.OCALLFUNC, ir.OCALLMETH:
+ case ir.OCALLMETH:
+ base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
+ case ir.OCALLFUNC:
call := call.(*ir.CallExpr)
call.NoInline = true
}
@@ -607,11 +546,18 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
case ir.OCLOSURE:
return n
case ir.OCALLMETH:
- // Prevent inlining some reflect.Value methods when using checkptr,
- // even when package reflect was compiled without it (#35073).
+ base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
+ case ir.OCALLFUNC:
n := n.(*ir.CallExpr)
- if s := ir.MethodExprName(n.X).Sym(); base.Debug.Checkptr != 0 && types.IsReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
- return n
+ if n.X.Op() == ir.OMETHEXPR {
+ // Prevent inlining some reflect.Value methods when using checkptr,
+ // even when package reflect was compiled without it (#35073).
+ if meth := ir.MethodExprName(n.X); meth != nil {
+ s := meth.Sym()
+ if base.Debug.Checkptr != 0 && types.IsReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
+ return n
+ }
+ }
}
}
@@ -619,31 +565,18 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
ir.EditChildren(n, edit)
- if as := n; as.Op() == ir.OAS2FUNC {
- as := as.(*ir.AssignListStmt)
- if as.Rhs[0].Op() == ir.OINLCALL {
- as.Rhs = inlconv2list(as.Rhs[0].(*ir.InlinedCallExpr))
- as.SetOp(ir.OAS2)
- as.SetTypecheck(0)
- n = typecheck.Stmt(as)
- }
- }
-
// with all the branches out of the way, it is now time to
// transmogrify this node itself unless inhibited by the
// switch at the top of this function.
switch n.Op() {
- case ir.OCALLFUNC, ir.OCALLMETH:
- n := n.(*ir.CallExpr)
- if n.NoInline {
- return n
- }
- }
+ case ir.OCALLMETH:
+ base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
- var call *ir.CallExpr
- switch n.Op() {
case ir.OCALLFUNC:
- call = n.(*ir.CallExpr)
+ call := n.(*ir.CallExpr)
+ if call.NoInline {
+ break
+ }
if base.Flag.LowerM > 3 {
fmt.Printf("%v:call to func %+v\n", ir.Line(n), call.X)
}
@@ -653,38 +586,10 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
if fn := inlCallee(call.X); fn != nil && fn.Inl != nil {
n = mkinlcall(call, fn, maxCost, inlMap, edit)
}
-
- case ir.OCALLMETH:
- call = n.(*ir.CallExpr)
- if base.Flag.LowerM > 3 {
- fmt.Printf("%v:call to meth %v\n", ir.Line(n), call.X.(*ir.SelectorExpr).Sel)
- }
-
- // typecheck should have resolved ODOTMETH->type, whose nname points to the actual function.
- if call.X.Type() == nil {
- base.Fatalf("no function type for [%p] %+v\n", call.X, call.X)
- }
-
- n = mkinlcall(call, ir.MethodExprName(call.X).Func, maxCost, inlMap, edit)
}
base.Pos = lno
- if n.Op() == ir.OINLCALL {
- ic := n.(*ir.InlinedCallExpr)
- switch call.Use {
- default:
- ir.Dump("call", call)
- base.Fatalf("call missing use")
- case ir.CallUseExpr:
- n = inlconv2expr(ic)
- case ir.CallUseStmt:
- n = inlconv2stmt(ic)
- case ir.CallUseList:
- // leave for caller to convert
- }
- }
-
return n
}
@@ -740,7 +645,12 @@ var inlgen int
// when producing output for debugging the compiler itself.
var SSADumpInline = func(*ir.Func) {}
-// If n is a call node (OCALLFUNC or OCALLMETH), and fn is an ONAME node for a
+// NewInline allows the inliner implementation to be overridden.
+// If it returns nil, the legacy inliner will handle this call
+// instead.
+var NewInline = func(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExpr { return nil }
+
+// If n is an OCALLFUNC node, and fn is an ONAME node for a
// function with an inlinable body, return an OINLCALL node that can replace n.
// The returned node's Ninit has the parameter assignments, the Nbody is the
// inlined function body, and (List, Rlist) contain the (input, output)
@@ -793,38 +703,90 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
defer func() {
inlMap[fn] = false
}()
- if base.Debug.TypecheckInl == 0 {
- typecheck.ImportedBody(fn)
+
+ typecheck.FixVariadicCall(n)
+
+ parent := base.Ctxt.PosTable.Pos(n.Pos()).Base().InliningIndex()
+
+ sym := fn.Linksym()
+ inlIndex := base.Ctxt.InlTree.Add(parent, n.Pos(), sym)
+
+ if base.Flag.GenDwarfInl > 0 {
+ if !sym.WasInlined() {
+ base.Ctxt.DwFixups.SetPrecursorFunc(sym, fn)
+ sym.Set(obj.AttrWasInlined, true)
+ }
}
- // We have a function node, and it has an inlineable body.
- if base.Flag.LowerM > 1 {
- fmt.Printf("%v: inlining call to %v %v { %v }\n", ir.Line(n), fn.Sym(), fn.Type(), ir.Nodes(fn.Inl.Body))
- } else if base.Flag.LowerM != 0 {
+ if base.Flag.LowerM != 0 {
fmt.Printf("%v: inlining call to %v\n", ir.Line(n), fn)
}
if base.Flag.LowerM > 2 {
fmt.Printf("%v: Before inlining: %+v\n", ir.Line(n), n)
}
- SSADumpInline(fn)
+ res := NewInline(n, fn, inlIndex)
+ if res == nil {
+ res = oldInline(n, fn, inlIndex)
+ }
+
+ // transitive inlining
+ // might be nice to do this before exporting the body,
+ // but can't emit the body with inlining expanded.
+ // instead we emit the things that the body needs
+ // and each use must redo the inlining.
+ // luckily these are small.
+ ir.EditChildren(res, edit)
- ninit := n.Init()
+ if base.Flag.LowerM > 2 {
+ fmt.Printf("%v: After inlining %+v\n\n", ir.Line(res), res)
+ }
- // For normal function calls, the function callee expression
- // may contain side effects (e.g., added by addinit during
- // inlconv2expr or inlconv2list). Make sure to preserve these,
- // if necessary (#42703).
- if n.Op() == ir.OCALLFUNC {
- callee := n.X
- for callee.Op() == ir.OCONVNOP {
+ return res
+}
+
+// CalleeEffects appends any side effects from evaluating callee to init.
+func CalleeEffects(init *ir.Nodes, callee ir.Node) {
+ for {
+ switch callee.Op() {
+ case ir.ONAME, ir.OCLOSURE, ir.OMETHEXPR:
+ return // done
+
+ case ir.OCONVNOP:
conv := callee.(*ir.ConvExpr)
- ninit.Append(ir.TakeInit(conv)...)
+ init.Append(ir.TakeInit(conv)...)
callee = conv.X
+
+ case ir.OINLCALL:
+ ic := callee.(*ir.InlinedCallExpr)
+ init.Append(ir.TakeInit(ic)...)
+ init.Append(ic.Body.Take()...)
+ callee = ic.SingleResult()
+
+ default:
+ base.FatalfAt(callee.Pos(), "unexpected callee expression: %v", callee)
}
- if callee.Op() != ir.ONAME && callee.Op() != ir.OCLOSURE && callee.Op() != ir.OMETHEXPR {
- base.Fatalf("unexpected callee expression: %v", callee)
- }
+ }
+}
+
+// oldInline creates an InlinedCallExpr to replace the given call
+// expression. fn is the callee function to be inlined. inlIndex is
+// the inlining tree position index, for use with src.NewInliningBase
+// when rewriting positions.
+func oldInline(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExpr {
+ if base.Debug.TypecheckInl == 0 {
+ typecheck.ImportedBody(fn)
+ }
+
+ SSADumpInline(fn)
+
+ ninit := call.Init()
+
+ // For normal function calls, the function callee expression
+ // may contain side effects. Make sure to preserve these,
+ // if necessary (#42703).
+ if call.Op() == ir.OCALLFUNC {
+ CalleeEffects(&ninit, call.X)
}
// Make temp names to use instead of the originals.
@@ -854,25 +816,6 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
}
// We can delay declaring+initializing result parameters if:
- // (1) there's exactly one "return" statement in the inlined function;
- // (2) it's not an empty return statement (#44355); and
- // (3) the result parameters aren't named.
- delayretvars := true
-
- nreturns := 0
- ir.VisitList(ir.Nodes(fn.Inl.Body), func(n ir.Node) {
- if n, ok := n.(*ir.ReturnStmt); ok {
- nreturns++
- if len(n.Results) == 0 {
- delayretvars = false // empty return statement (case 2)
- }
- }
- })
-
- if nreturns != 1 {
- delayretvars = false // not exactly one return statement (case 1)
- }
-
// temporaries for return values.
var retvars []ir.Node
for i, t := range fn.Type().Results().Fields().Slice() {
@@ -882,7 +825,6 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
m = inlvar(n)
m = typecheck.Expr(m).(*ir.Name)
inlvars[n] = m
- delayretvars = false // found a named result parameter (case 3)
} else {
// anonymous return values, synthesize names for use in assignment that replaces return
m = retvar(t, i)
@@ -905,61 +847,23 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// Assign arguments to the parameters' temp names.
as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
as.Def = true
- if n.Op() == ir.OCALLMETH {
- sel := n.X.(*ir.SelectorExpr)
- if sel.X == nil {
- base.Fatalf("method call without receiver: %+v", n)
- }
- as.Rhs.Append(sel.X)
+ if call.Op() == ir.OCALLMETH {
+ base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
}
- as.Rhs.Append(n.Args...)
-
- // For non-dotted calls to variadic functions, we assign the
- // variadic parameter's temp name separately.
- var vas *ir.AssignStmt
+ as.Rhs.Append(call.Args...)
if recv := fn.Type().Recv(); recv != nil {
as.Lhs.Append(inlParam(recv, as, inlvars))
}
for _, param := range fn.Type().Params().Fields().Slice() {
- // For ordinary parameters or variadic parameters in
- // dotted calls, just add the variable to the
- // assignment list, and we're done.
- if !param.IsDDD() || n.IsDDD {
- as.Lhs.Append(inlParam(param, as, inlvars))
- continue
- }
-
- // Otherwise, we need to collect the remaining values
- // to pass as a slice.
-
- x := len(as.Lhs)
- for len(as.Lhs) < len(as.Rhs) {
- as.Lhs.Append(argvar(param.Type, len(as.Lhs)))
- }
- varargs := as.Lhs[x:]
-
- vas = ir.NewAssignStmt(base.Pos, nil, nil)
- vas.X = inlParam(param, vas, inlvars)
- if len(varargs) == 0 {
- vas.Y = typecheck.NodNil()
- vas.Y.SetType(param.Type)
- } else {
- lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(param.Type), nil)
- lit.List = varargs
- vas.Y = lit
- }
+ as.Lhs.Append(inlParam(param, as, inlvars))
}
if len(as.Rhs) != 0 {
ninit.Append(typecheck.Stmt(as))
}
- if vas != nil {
- ninit.Append(typecheck.Stmt(vas))
- }
-
- if !delayretvars {
+ if !fn.Inl.CanDelayResults {
// Zero the return parameters.
for _, n := range retvars {
ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
@@ -972,40 +876,21 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
inlgen++
- parent := -1
- if b := base.Ctxt.PosTable.Pos(n.Pos()).Base(); b != nil {
- parent = b.InliningIndex()
- }
-
- sym := fn.Linksym()
- newIndex := base.Ctxt.InlTree.Add(parent, n.Pos(), sym)
-
// Add an inline mark just before the inlined body.
// This mark is inline in the code so that it's a reasonable spot
// to put a breakpoint. Not sure if that's really necessary or not
// (in which case it could go at the end of the function instead).
// Note issue 28603.
- inlMark := ir.NewInlineMarkStmt(base.Pos, types.BADWIDTH)
- inlMark.SetPos(n.Pos().WithIsStmt())
- inlMark.Index = int64(newIndex)
- ninit.Append(inlMark)
-
- if base.Flag.GenDwarfInl > 0 {
- if !sym.WasInlined() {
- base.Ctxt.DwFixups.SetPrecursorFunc(sym, fn)
- sym.Set(obj.AttrWasInlined, true)
- }
- }
+ ninit.Append(ir.NewInlineMarkStmt(call.Pos().WithIsStmt(), int64(inlIndex)))
subst := inlsubst{
- retlabel: retlabel,
- retvars: retvars,
- delayretvars: delayretvars,
- inlvars: inlvars,
- defnMarker: ir.NilExpr{},
- bases: make(map[*src.PosBase]*src.PosBase),
- newInlIndex: newIndex,
- fn: fn,
+ retlabel: retlabel,
+ retvars: retvars,
+ inlvars: inlvars,
+ defnMarker: ir.NilExpr{},
+ bases: make(map[*src.PosBase]*src.PosBase),
+ newInlIndex: inlIndex,
+ fn: fn,
}
subst.edit = subst.node
@@ -1026,26 +911,11 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
//dumplist("ninit post", ninit);
- call := ir.NewInlinedCallExpr(base.Pos, nil, nil)
- *call.PtrInit() = ninit
- call.Body = body
- call.ReturnVars = retvars
- call.SetType(n.Type())
- call.SetTypecheck(1)
-
- // transitive inlining
- // might be nice to do this before exporting the body,
- // but can't emit the body with inlining expanded.
- // instead we emit the things that the body needs
- // and each use must redo the inlining.
- // luckily these are small.
- ir.EditChildren(call, edit)
-
- if base.Flag.LowerM > 2 {
- fmt.Printf("%v: After inlining %+v\n\n", ir.Line(call), call)
- }
-
- return call
+ res := ir.NewInlinedCallExpr(base.Pos, body, retvars)
+ res.SetInit(ninit)
+ res.SetType(call.Type())
+ res.SetTypecheck(1)
+ return res
}
// Every time we expand a function we generate a new set of tmpnames,
@@ -1058,8 +928,10 @@ func inlvar(var_ *ir.Name) *ir.Name {
n := typecheck.NewName(var_.Sym())
n.SetType(var_.Type())
+ n.SetTypecheck(1)
n.Class = ir.PAUTO
n.SetUsed(true)
+ n.SetAutoTemp(var_.AutoTemp())
n.Curfn = ir.CurFunc // the calling function, not the called one
n.SetAddrtaken(var_.Addrtaken())
@@ -1071,18 +943,7 @@ func inlvar(var_ *ir.Name) *ir.Name {
func retvar(t *types.Field, i int) *ir.Name {
n := typecheck.NewName(typecheck.LookupNum("~R", i))
n.SetType(t.Type)
- n.Class = ir.PAUTO
- n.SetUsed(true)
- n.Curfn = ir.CurFunc // the calling function, not the called one
- ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
- return n
-}
-
-// Synthesize a variable to store the inlined function's arguments
-// when they come from a multiple return call.
-func argvar(t *types.Type, i int) ir.Node {
- n := typecheck.NewName(typecheck.LookupNum("~arg", i))
- n.SetType(t.Elem())
+ n.SetTypecheck(1)
n.Class = ir.PAUTO
n.SetUsed(true)
n.Curfn = ir.CurFunc // the calling function, not the called one
@@ -1099,10 +960,6 @@ type inlsubst struct {
// Temporary result variables.
retvars []ir.Node
- // Whether result variables should be initialized at the
- // "return" statement.
- delayretvars bool
-
inlvars map[*ir.Name]*ir.Name
// defnMarker is used to mark a Node for reassignment.
// inlsubst.clovar set this during creating new ONAME.
@@ -1157,17 +1014,21 @@ func (subst *inlsubst) fields(oldt *types.Type) []*types.Field {
// clovar creates a new ONAME node for a local variable or param of a closure
// inside a function being inlined.
func (subst *inlsubst) clovar(n *ir.Name) *ir.Name {
- // TODO(danscales): want to get rid of this shallow copy, with code like the
- // following, but it is hard to copy all the necessary flags in a maintainable way.
- // m := ir.NewNameAt(n.Pos(), n.Sym())
- // m.Class = n.Class
- // m.SetType(n.Type())
- // m.SetTypecheck(1)
- //if n.IsClosureVar() {
- // m.SetIsClosureVar(true)
- //}
- m := &ir.Name{}
- *m = *n
+ m := ir.NewNameAt(n.Pos(), n.Sym())
+ m.Class = n.Class
+ m.SetType(n.Type())
+ m.SetTypecheck(1)
+ if n.IsClosureVar() {
+ m.SetIsClosureVar(true)
+ }
+ if n.Addrtaken() {
+ m.SetAddrtaken(true)
+ }
+ if n.Used() {
+ m.SetUsed(true)
+ }
+ m.Defn = n.Defn
+
m.Curfn = subst.newclofn
switch defn := n.Defn.(type) {
@@ -1222,8 +1083,6 @@ func (subst *inlsubst) clovar(n *ir.Name) *ir.Name {
// closure does the necessary substitutions for a ClosureExpr n and returns the new
// closure node.
func (subst *inlsubst) closure(n *ir.ClosureExpr) ir.Node {
- m := ir.Copy(n)
-
// Prior to the subst edit, set a flag in the inlsubst to
// indicated that we don't want to update the source positions in
// the new closure. If we do this, it will appear that the closure
@@ -1231,29 +1090,16 @@ func (subst *inlsubst) closure(n *ir.ClosureExpr) ir.Node {
// issue #46234 for more details.
defer func(prev bool) { subst.noPosUpdate = prev }(subst.noPosUpdate)
subst.noPosUpdate = true
- ir.EditChildren(m, subst.edit)
//fmt.Printf("Inlining func %v with closure into %v\n", subst.fn, ir.FuncName(ir.CurFunc))
- // The following is similar to funcLit
oldfn := n.Func
- newfn := ir.NewFunc(oldfn.Pos())
- // These three lines are not strictly necessary, but just to be clear
- // that new function needs to redo typechecking and inlinability.
- newfn.SetTypecheck(0)
- newfn.SetInlinabilityChecked(false)
- newfn.Inl = nil
- newfn.SetIsHiddenClosure(true)
- newfn.Nname = ir.NewNameAt(n.Pos(), ir.BlankNode.Sym())
- newfn.Nname.Func = newfn
+ newfn := ir.NewClosureFunc(oldfn.Pos(), true)
+
// Ntype can be nil for -G=3 mode.
if oldfn.Nname.Ntype != nil {
newfn.Nname.Ntype = subst.node(oldfn.Nname.Ntype).(ir.Ntype)
}
- newfn.Nname.Defn = newfn
-
- m.(*ir.ClosureExpr).Func = newfn
- newfn.OClosure = m.(*ir.ClosureExpr)
if subst.newclofn != nil {
//fmt.Printf("Inlining a closure with a nested closure\n")
@@ -1303,13 +1149,9 @@ func (subst *inlsubst) closure(n *ir.ClosureExpr) ir.Node {
// Actually create the named function for the closure, now that
// the closure is inlined in a specific function.
- m.SetTypecheck(0)
- if oldfn.ClosureCalled() {
- typecheck.Callee(m)
- } else {
- typecheck.Expr(m)
- }
- return m
+ newclo := newfn.OClosure
+ newclo.SetInit(subst.list(n.Init()))
+ return typecheck.Expr(newclo)
}
// node recursively copies a node from the saved pristine body of the
@@ -1391,7 +1233,7 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
}
as.Rhs = subst.list(n.Results)
- if subst.delayretvars {
+ if subst.fn.Inl.CanDelayResults {
for _, n := range as.Lhs {
as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n.(*ir.Name)))
n.Name().Defn = as
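The new CanDelayResults flag computed by canDelayResults above depends only on the shape of the callee's source. The following hedged sketch (all names illustrative) shows which shapes qualify: when the flag holds, the inliner can declare the ~R result temporaries at the return site instead of zeroing them up front.

package main

import "fmt"

// Delayable: exactly one return statement, it is not empty, and the
// result parameter is unnamed.
func double(x int) int {
	return 2 * x
}

// Not delayable (case 1): more than one return statement.
func abs(x int) int {
	if x < 0 {
		return -x
	}
	return x
}

// Not delayable (cases 2 and 3): the result is named and the final
// "return" is empty.
func sign(x int) (s int) {
	if x >= 0 {
		s = 1
	} else {
		s = -1
	}
	return
}

func main() {
	fmt.Println(double(3), abs(-4), sign(-7)) // 6 4 -1
}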
diff --git a/src/cmd/compile/internal/ir/expr.go b/src/cmd/compile/internal/ir/expr.go
index f70645f079..f526d987a7 100644
--- a/src/cmd/compile/internal/ir/expr.go
+++ b/src/cmd/compile/internal/ir/expr.go
@@ -142,28 +142,15 @@ func (n *BinaryExpr) SetOp(op Op) {
}
}
-// A CallUse records how the result of the call is used:
-type CallUse byte
-
-const (
- _ CallUse = iota
-
- CallUseExpr // single expression result is used
- CallUseList // list of results are used
- CallUseStmt // results not used - call is a statement
-)
-
// A CallExpr is a function call X(Args).
type CallExpr struct {
miniExpr
origNode
- X Node
- Args Nodes
- KeepAlive []*Name // vars to be kept alive until call returns
- IsDDD bool
- Use CallUse
- NoInline bool
- PreserveClosure bool // disable directClosureCall for this call
+ X Node
+ Args Nodes
+ KeepAlive []*Name // vars to be kept alive until call returns
+ IsDDD bool
+ NoInline bool
}
func NewCallExpr(pos src.XPos, op Op, fun Node, args []Node) *CallExpr {
@@ -181,8 +168,12 @@ func (n *CallExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
- case OCALL, OCALLFUNC, OCALLINTER, OCALLMETH,
- OAPPEND, ODELETE, OGETG, OMAKE, OPRINT, OPRINTN, ORECOVER:
+ case OAPPEND,
+ OCALL, OCALLFUNC, OCALLINTER, OCALLMETH,
+ ODELETE,
+ OGETG, OGETCALLERPC, OGETCALLERSP,
+ OMAKE, OPRINT, OPRINTN,
+ ORECOVER, ORECOVERFP:
n.op = op
}
}
@@ -192,8 +183,10 @@ type ClosureExpr struct {
miniExpr
Func *Func `mknode:"-"`
Prealloc *Name
+ IsGoWrap bool // whether this is wrapper closure of a go statement
}
+// Deprecated: Use NewClosureFunc instead.
func NewClosureExpr(pos src.XPos, fn *Func) *ClosureExpr {
n := &ClosureExpr{Func: fn}
n.op = OCLOSURE
@@ -277,12 +270,12 @@ func (n *ConvExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
- case OCONV, OCONVIFACE, OCONVNOP, OBYTES2STR, OBYTES2STRTMP, ORUNES2STR, OSTR2BYTES, OSTR2BYTESTMP, OSTR2RUNES, ORUNESTR, OSLICE2ARRPTR:
+ case OCONV, OCONVIFACE, OCONVIDATA, OCONVNOP, OBYTES2STR, OBYTES2STRTMP, ORUNES2STR, OSTR2BYTES, OSTR2BYTESTMP, OSTR2RUNES, ORUNESTR, OSLICE2ARRPTR:
n.op = op
}
}
-// An IndexExpr is an index expression X[Y].
+// An IndexExpr is an index expression X[Index].
type IndexExpr struct {
miniExpr
X Node
@@ -323,26 +316,24 @@ func NewKeyExpr(pos src.XPos, key, value Node) *KeyExpr {
// A StructKeyExpr is an Field: Value composite literal key.
type StructKeyExpr struct {
miniExpr
- Field *types.Sym
- Value Node
- Offset int64
+ Field *types.Field
+ Value Node
}
-func NewStructKeyExpr(pos src.XPos, field *types.Sym, value Node) *StructKeyExpr {
+func NewStructKeyExpr(pos src.XPos, field *types.Field, value Node) *StructKeyExpr {
n := &StructKeyExpr{Field: field, Value: value}
n.pos = pos
n.op = OSTRUCTKEY
- n.Offset = types.BADWIDTH
return n
}
-func (n *StructKeyExpr) Sym() *types.Sym { return n.Field }
+func (n *StructKeyExpr) Sym() *types.Sym { return n.Field.Sym }
// An InlinedCallExpr is an inlined function call.
type InlinedCallExpr struct {
miniExpr
Body Nodes
- ReturnVars Nodes
+ ReturnVars Nodes // must be side-effect free
}
func NewInlinedCallExpr(pos src.XPos, body, retvars []Node) *InlinedCallExpr {
@@ -354,6 +345,21 @@ func NewInlinedCallExpr(pos src.XPos, body, retvars []Node) *InlinedCallExpr {
return n
}
+func (n *InlinedCallExpr) SingleResult() Node {
+ if have := len(n.ReturnVars); have != 1 {
+ base.FatalfAt(n.Pos(), "inlined call has %v results, expected 1", have)
+ }
+ if !n.Type().HasShape() && n.ReturnVars[0].Type().HasShape() {
+ // If the type of the call is not a shape, but the type of the return value
+ // is a shape, we need to do an implicit conversion, so the real type
+ // of n is maintained.
+ r := NewConvExpr(n.Pos(), OCONVNOP, n.Type(), n.ReturnVars[0])
+ r.SetTypecheck(1)
+ return r
+ }
+ return n.ReturnVars[0]
+}
+
// A LogicalExpr is an expression X Op Y where Op is && or ||.
// It is separate from BinaryExpr to make room for statements
// that must be executed before Y but after X.
@@ -448,6 +454,20 @@ func (n *ParenExpr) SetOTYPE(t *types.Type) {
t.SetNod(n)
}
+// A RawOrigExpr represents an arbitrary Go expression as a string value.
+// When printed in diagnostics, the string value is written out exactly as-is.
+type RawOrigExpr struct {
+ miniExpr
+ Raw string
+}
+
+func NewRawOrigExpr(pos src.XPos, op Op, raw string) *RawOrigExpr {
+ n := &RawOrigExpr{Raw: raw}
+ n.pos = pos
+ n.op = op
+ return n
+}
+
// A ResultExpr represents a direct access to a result.
type ResultExpr struct {
miniExpr
@@ -494,10 +514,15 @@ func NewNameOffsetExpr(pos src.XPos, name *Name, offset int64, typ *types.Type)
// A SelectorExpr is a selector expression X.Sel.
type SelectorExpr struct {
miniExpr
- X Node
- Sel *types.Sym
+ X Node
+ // Sel is the name of the field or method being selected, without (in the
+ // case of methods) any preceding type specifier. If the field/method is
+ // exported, then the Sym uses the local package regardless of the package
+ // of the containing type.
+ Sel *types.Sym
+ // The actual selected field - may not be filled in until typechecking.
Selection *types.Field
- Prealloc *Name // preallocated storage for OCALLPART, if any
+ Prealloc *Name // preallocated storage for OMETHVALUE, if any
}
func NewSelectorExpr(pos src.XPos, op Op, x Node, sel *types.Sym) *SelectorExpr {
@@ -511,7 +536,7 @@ func (n *SelectorExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
- case OXDOT, ODOT, ODOTPTR, ODOTMETH, ODOTINTER, OCALLPART, OMETHEXPR:
+ case OXDOT, ODOT, ODOTPTR, ODOTMETH, ODOTINTER, OMETHVALUE, OMETHEXPR:
n.op = op
}
}
@@ -652,6 +677,38 @@ func (n *TypeAssertExpr) SetOp(op Op) {
}
}
+// A DynamicTypeAssertExpr asserts that X is of dynamic type T.
+type DynamicTypeAssertExpr struct {
+ miniExpr
+ X Node
+ // N = not an interface
+ // E = empty interface
+ // I = nonempty interface
+ // For E->N, T is a *runtime.type for N
+ // For I->N, T is a *runtime.itab for N+I
+ // For E->I, T is a *runtime.type for I
+ // For I->I, ditto
+ // For I->E, T is a *runtime.type for interface{} (unnecessary, but just to fill in the slot)
+ // For E->E, ditto
+ T Node
+}
+
+func NewDynamicTypeAssertExpr(pos src.XPos, op Op, x, t Node) *DynamicTypeAssertExpr {
+ n := &DynamicTypeAssertExpr{X: x, T: t}
+ n.pos = pos
+ n.op = op
+ return n
+}
+
+func (n *DynamicTypeAssertExpr) SetOp(op Op) {
+ switch op {
+ default:
+ panic(n.no("SetOp " + op.String()))
+ case ODYNAMICDOTTYPE, ODYNAMICDOTTYPE2:
+ n.op = op
+ }
+}
+
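
For orientation, a minimal source-level sketch (not part of this patch; names invented) of the kind of assertion these nodes model: when T is a type parameter, i.(T) is lowered to ODYNAMICDOTTYPE, and the comma-ok form to ODYNAMICDOTTYPE2.

package main

import "fmt"

// assertTo asserts i against the type parameter T. With T not known
// statically, the comma-ok assertion below becomes a dynamic one.
func assertTo[T any](i interface{}) (T, bool) {
	v, ok := i.(T)
	return v, ok
}

func main() {
	v, ok := assertTo[int](any(42))
	fmt.Println(v, ok) // 42 true

	_, ok = assertTo[string](any(42))
	fmt.Println(ok) // false
}
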
// A UnaryExpr is a unary expression Op X,
// or Op(X) for a builtin function that does not end up being a call.
type UnaryExpr struct {
@@ -678,6 +735,11 @@ func (n *UnaryExpr) SetOp(op Op) {
}
}
+// Probably temporary: using Implicit() flag to mark generic function nodes that
+// are called to make getGfInfo analysis easier in one pre-order pass.
+func (n *InstExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
+func (n *InstExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
+
// An InstExpr is a generic function or type instantiation.
type InstExpr struct {
miniExpr
@@ -773,6 +835,11 @@ func StaticValue(n Node) Node {
continue
}
+ if n.Op() == OINLCALL {
+ n = n.(*InlinedCallExpr).SingleResult()
+ continue
+ }
+
n1 := staticValue1(n)
if n1 == nil {
return n
@@ -1071,7 +1138,7 @@ func MethodExprName(n Node) *Name {
// MethodExprFunc is like MethodExprName, but returns the types.Field instead.
func MethodExprFunc(n Node) *types.Field {
switch n.Op() {
- case ODOTMETH, OMETHEXPR, OCALLPART:
+ case ODOTMETH, OMETHEXPR, OMETHVALUE:
return n.(*SelectorExpr).Selection
}
base.Fatalf("unexpected node: %v (%v)", n, n.Op())
diff --git a/src/cmd/compile/internal/ir/fmt.go b/src/cmd/compile/internal/ir/fmt.go
index f2ae0f7606..d19fe453ef 100644
--- a/src/cmd/compile/internal/ir/fmt.go
+++ b/src/cmd/compile/internal/ir/fmt.go
@@ -185,6 +185,7 @@ var OpPrec = []int{
OCLOSE: 8,
OCOMPLIT: 8,
OCONVIFACE: 8,
+ OCONVIDATA: 8,
OCONVNOP: 8,
OCONV: 8,
OCOPY: 8,
@@ -237,7 +238,7 @@ var OpPrec = []int{
ODOTTYPE: 8,
ODOT: 8,
OXDOT: 8,
- OCALLPART: 8,
+ OMETHVALUE: 8,
OMETHEXPR: 8,
OPLUS: 7,
ONOT: 7,
@@ -546,7 +547,7 @@ func exprFmt(n Node, s fmt.State, prec int) {
n = nn.X
continue
}
- case OCONV, OCONVNOP, OCONVIFACE:
+ case OCONV, OCONVNOP, OCONVIFACE, OCONVIDATA:
nn := nn.(*ConvExpr)
if nn.Implicit() {
n = nn.X
@@ -567,6 +568,11 @@ func exprFmt(n Node, s fmt.State, prec int) {
return
}
+ if n, ok := n.(*RawOrigExpr); ok {
+ fmt.Fprint(s, n.Raw)
+ return
+ }
+
switch n.Op() {
case OPAREN:
n := n.(*ParenExpr)
@@ -709,6 +715,10 @@ func exprFmt(n Node, s fmt.State, prec int) {
fmt.Fprintf(s, "... argument")
return
}
+ if typ := n.Type(); typ != nil {
+ fmt.Fprintf(s, "%v{%s}", typ, ellipsisIf(len(n.List) != 0))
+ return
+ }
if n.Ntype != nil {
fmt.Fprintf(s, "%v{%s}", n.Ntype, ellipsisIf(len(n.List) != 0))
return
@@ -752,7 +762,7 @@ func exprFmt(n Node, s fmt.State, prec int) {
n := n.(*StructKeyExpr)
fmt.Fprintf(s, "%v:%v", n.Field, n.Value)
- case OXDOT, ODOT, ODOTPTR, ODOTINTER, ODOTMETH, OCALLPART, OMETHEXPR:
+ case OXDOT, ODOT, ODOTPTR, ODOTINTER, ODOTMETH, OMETHVALUE, OMETHEXPR:
n := n.(*SelectorExpr)
exprFmt(n.X, s, nprec)
if n.Sel == nil {
@@ -804,6 +814,7 @@ func exprFmt(n Node, s fmt.State, prec int) {
case OCONV,
OCONVIFACE,
+ OCONVIDATA,
OCONVNOP,
OBYTES2STR,
ORUNES2STR,
@@ -854,6 +865,15 @@ func exprFmt(n Node, s fmt.State, prec int) {
}
fmt.Fprintf(s, "(%.v)", n.Args)
+ case OINLCALL:
+ n := n.(*InlinedCallExpr)
+ // TODO(mdempsky): Print Init and/or Body?
+ if len(n.ReturnVars) == 1 {
+ fmt.Fprintf(s, "%v", n.ReturnVars[0])
+ return
+ }
+ fmt.Fprintf(s, "(.%v)", n.ReturnVars)
+
case OMAKEMAP, OMAKECHAN, OMAKESLICE:
n := n.(*MakeExpr)
if n.Cap != nil {
@@ -986,7 +1006,7 @@ func (l Nodes) Format(s fmt.State, verb rune) {
// Dump prints the message s followed by a debug dump of n.
func Dump(s string, n Node) {
- fmt.Printf("%s [%p]%+v\n", s, n, n)
+ fmt.Printf("%s%+v\n", s, n)
}
// DumpList prints the message s followed by a debug dump of each node in the list.
@@ -1114,16 +1134,21 @@ func dumpNodeHeader(w io.Writer, n Node) {
}
if n.Pos().IsKnown() {
- pfx := ""
+ fmt.Fprint(w, " # ")
switch n.Pos().IsStmt() {
case src.PosNotStmt:
- pfx = "_" // "-" would be confusing
+ fmt.Fprint(w, "_") // "-" would be confusing
case src.PosIsStmt:
- pfx = "+"
+ fmt.Fprint(w, "+")
+ }
+ for i, pos := range base.Ctxt.AllPos(n.Pos(), nil) {
+ if i > 0 {
+ fmt.Fprint(w, ",")
+ }
+ // TODO(mdempsky): Print line pragma details too.
+ file := filepath.Base(pos.Filename())
+ fmt.Fprintf(w, "%s:%d:%d", file, pos.Line(), pos.Col())
}
- pos := base.Ctxt.PosTable.Pos(n.Pos())
- file := filepath.Base(pos.Filename())
- fmt.Fprintf(w, " # %s%s:%d", pfx, file, pos.Line())
}
}
diff --git a/src/cmd/compile/internal/ir/func.go b/src/cmd/compile/internal/ir/func.go
index 20fe965711..18d0b023ad 100644
--- a/src/cmd/compile/internal/ir/func.go
+++ b/src/cmd/compile/internal/ir/func.go
@@ -9,6 +9,7 @@ import (
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
+ "fmt"
)
// A Func corresponds to a single function in a Go program
@@ -39,14 +40,14 @@ import (
// constructs a fresh node.
//
// A method value (t.M) is represented by ODOTMETH/ODOTINTER
-// when it is called directly and by OCALLPART otherwise.
+// when it is called directly and by OMETHVALUE otherwise.
// These are like method expressions, except that for ODOTMETH/ODOTINTER,
// the method name is stored in Sym instead of Right.
-// Each OCALLPART ends up being implemented as a new
+// Each OMETHVALUE ends up being implemented as a new
// function, a bit like a closure, with its own ODCLFUNC.
-// The OCALLPART uses n.Func to record the linkage to
+// The OMETHVALUE uses n.Func to record the linkage to
// the generated ODCLFUNC, but there is no
-// pointer from the Func back to the OCALLPART.
+// pointer from the Func back to the OMETHVALUE.
type Func struct {
miniNode
Body Nodes
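
As a reminder of the source-level distinction drawn in the doc comment above (illustrative program only, not part of this patch): a method value binds its receiver when evaluated, while a method expression takes the receiver as an explicit first argument.

package main

import "fmt"

type Counter struct{ n int }

func (c Counter) Get() int { return c.n }

func main() {
	c := Counter{n: 7}

	get := c.Get         // method value: receiver bound now (OMETHVALUE when not called directly)
	getOf := Counter.Get // method expression: receiver passed explicitly (OMETHEXPR)

	fmt.Println(get())    // 7
	fmt.Println(getOf(c)) // 7
}
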
@@ -166,6 +167,11 @@ type Inline struct {
// another package is imported.
Dcl []*Name
Body []Node
+
+ // CanDelayResults reports whether it's safe for the inliner to delay
+ // initializing the result parameters until immediately before the
+ // "return" statement.
+ CanDelayResults bool
}
// A Mark represents a scope boundary.
@@ -190,13 +196,14 @@ const (
// true if closure inside a function; false if a simple function or a
// closure in a global variable initialization
funcIsHiddenClosure
+ funcIsDeadcodeClosure // true if closure is deadcode
funcHasDefer // contains a defer statement
funcNilCheckDisabled // disable nil checks when compiling this function
funcInlinabilityChecked // inliner has already determined whether the function is inlinable
funcExportInline // include inline body in export data
funcInstrumentBody // add race/msan instrumentation during SSA construction
funcOpenCodedDeferDisallowed // can't do open-coded defers
- funcClosureCalled // closure is only immediately called
+ funcClosureCalled // closure is only immediately called; used by escape analysis
)
type SymAndPos struct {
@@ -210,6 +217,7 @@ func (f *Func) ABIWrapper() bool { return f.flags&funcABIWrapper !
func (f *Func) Needctxt() bool { return f.flags&funcNeedctxt != 0 }
func (f *Func) ReflectMethod() bool { return f.flags&funcReflectMethod != 0 }
func (f *Func) IsHiddenClosure() bool { return f.flags&funcIsHiddenClosure != 0 }
+func (f *Func) IsDeadcodeClosure() bool { return f.flags&funcIsDeadcodeClosure != 0 }
func (f *Func) HasDefer() bool { return f.flags&funcHasDefer != 0 }
func (f *Func) NilCheckDisabled() bool { return f.flags&funcNilCheckDisabled != 0 }
func (f *Func) InlinabilityChecked() bool { return f.flags&funcInlinabilityChecked != 0 }
@@ -224,6 +232,7 @@ func (f *Func) SetABIWrapper(b bool) { f.flags.set(funcABIWrapper,
func (f *Func) SetNeedctxt(b bool) { f.flags.set(funcNeedctxt, b) }
func (f *Func) SetReflectMethod(b bool) { f.flags.set(funcReflectMethod, b) }
func (f *Func) SetIsHiddenClosure(b bool) { f.flags.set(funcIsHiddenClosure, b) }
+func (f *Func) SetIsDeadcodeClosure(b bool) { f.flags.set(funcIsDeadcodeClosure, b) }
func (f *Func) SetHasDefer(b bool) { f.flags.set(funcHasDefer, b) }
func (f *Func) SetNilCheckDisabled(b bool) { f.flags.set(funcNilCheckDisabled, b) }
func (f *Func) SetInlinabilityChecked(b bool) { f.flags.set(funcInlinabilityChecked, b) }
@@ -272,6 +281,17 @@ func PkgFuncName(f *Func) string {
var CurFunc *Func
+// WithFunc invokes do with CurFunc and base.Pos set to curfn and
+// curfn.Pos(), respectively, and then restores their previous values
+// before returning.
+func WithFunc(curfn *Func, do func()) {
+ oldfn, oldpos := CurFunc, base.Pos
+ defer func() { CurFunc, base.Pos = oldfn, oldpos }()
+
+ CurFunc, base.Pos = curfn, curfn.Pos()
+ do()
+}
+
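
WithFunc follows the usual save/assign/defer-restore idiom. A standalone sketch of the same pattern, with a package-level variable standing in for CurFunc and base.Pos (names invented):

package main

import "fmt"

var current = "package scope"

// with temporarily installs name as the current context, runs do,
// and restores the previous value even if do panics.
func with(name string, do func()) {
	old := current
	defer func() { current = old }()

	current = name
	do()
}

func main() {
	with("some function", func() { fmt.Println(current) }) // some function
	fmt.Println(current)                                    // package scope
}
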
func FuncSymName(s *types.Sym) string {
return s.Name + "·f"
}
@@ -279,7 +299,7 @@ func FuncSymName(s *types.Sym) string {
// MarkFunc marks a node as a function.
func MarkFunc(n *Name) {
if n.Op() != ONAME || n.Class != Pxxx {
- base.Fatalf("expected ONAME/Pxxx node, got %v", n)
+ base.FatalfAt(n.Pos(), "expected ONAME/Pxxx node, got %v (%v/%v)", n, n.Op(), n.Class)
}
n.Class = PFUNC
@@ -296,8 +316,8 @@ func ClosureDebugRuntimeCheck(clo *ClosureExpr) {
base.WarnfAt(clo.Pos(), "stack closure, captured vars = %v", clo.Func.ClosureVars)
}
}
- if base.Flag.CompilingRuntime && clo.Esc() == EscHeap {
- base.ErrorfAt(clo.Pos(), "heap-allocated closure, not allowed in runtime")
+ if base.Flag.CompilingRuntime && clo.Esc() == EscHeap && !clo.IsGoWrap {
+ base.ErrorfAt(clo.Pos(), "heap-allocated closure %s, not allowed in runtime", FuncName(clo.Func))
}
}
@@ -306,3 +326,109 @@ func ClosureDebugRuntimeCheck(clo *ClosureExpr) {
func IsTrivialClosure(clo *ClosureExpr) bool {
return len(clo.Func.ClosureVars) == 0
}
+
+// globClosgen is like Func.Closgen, but for the global scope.
+var globClosgen int32
+
+// closureName generates a new unique name for a closure within outerfn.
+func closureName(outerfn *Func) *types.Sym {
+ pkg := types.LocalPkg
+ outer := "glob."
+ prefix := "func"
+ gen := &globClosgen
+
+ if outerfn != nil {
+ if outerfn.OClosure != nil {
+ prefix = ""
+ }
+
+ pkg = outerfn.Sym().Pkg
+ outer = FuncName(outerfn)
+
+ // There may be multiple functions named "_". In those
+ // cases, we can't use their individual Closgens as it
+ // would lead to name clashes.
+ if !IsBlank(outerfn.Nname) {
+ gen = &outerfn.Closgen
+ }
+ }
+
+ *gen++
+ return pkg.Lookup(fmt.Sprintf("%s.%s%d", outer, prefix, *gen))
+}
+
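
The symbols generated by closureName above follow the familiar outer.funcN pattern (outer.funcN.M for closures nested inside closures, glob..funcN at package scope). A small program that observes the resulting link names at run time; the exact output is version-dependent and shown only as a typical example:

package main

import (
	"fmt"
	"reflect"
	"runtime"
)

func nameOf(f interface{}) string {
	return runtime.FuncForPC(reflect.ValueOf(f).Pointer()).Name()
}

func main() {
	f := func() {}                            // first closure in main
	g := func() func() { return func() {} }() // closure nested inside the second closure

	fmt.Println(nameOf(f)) // e.g. main.main.func1
	fmt.Println(nameOf(g)) // e.g. main.main.func2.1
}
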
+// NewClosureFunc creates a new Func to represent a function literal.
+// If hidden is true, then the closure is marked hidden (i.e., as a
+// function literal contained within another function, rather than a
+// package-scope variable initialization expression).
+func NewClosureFunc(pos src.XPos, hidden bool) *Func {
+ fn := NewFunc(pos)
+ fn.SetIsHiddenClosure(hidden)
+
+ fn.Nname = NewNameAt(pos, BlankNode.Sym())
+ fn.Nname.Func = fn
+ fn.Nname.Defn = fn
+
+ fn.OClosure = NewClosureExpr(pos, fn)
+
+ return fn
+}
+
+// NameClosure generates a unique name for the given function literal,
+// which must have appeared within outerfn.
+func NameClosure(clo *ClosureExpr, outerfn *Func) {
+ fn := clo.Func
+ if fn.IsHiddenClosure() != (outerfn != nil) {
+ base.FatalfAt(clo.Pos(), "closure naming inconsistency: hidden %v, but outer %v", fn.IsHiddenClosure(), outerfn)
+ }
+
+ name := fn.Nname
+ if !IsBlank(name) {
+ base.FatalfAt(clo.Pos(), "closure already named: %v", name)
+ }
+
+ name.SetSym(closureName(outerfn))
+ MarkFunc(name)
+}
+
+// UseClosure checks that the given function literal has been set up
+// correctly, and then returns it as an expression.
+// It must be called after clo.Func.ClosureVars has been set.
+func UseClosure(clo *ClosureExpr, pkg *Package) Node {
+ fn := clo.Func
+ name := fn.Nname
+
+ if IsBlank(name) {
+ base.FatalfAt(fn.Pos(), "unnamed closure func: %v", fn)
+ }
+ // Caution: clo.Typecheck() is still 0 when UseClosure is called by
+ // tcClosure.
+ if fn.Typecheck() != 1 || name.Typecheck() != 1 {
+ base.FatalfAt(fn.Pos(), "missed typecheck: %v", fn)
+ }
+ if clo.Type() == nil || name.Type() == nil {
+ base.FatalfAt(fn.Pos(), "missing types: %v", fn)
+ }
+ if !types.Identical(clo.Type(), name.Type()) {
+ base.FatalfAt(fn.Pos(), "mismatched types: %v", fn)
+ }
+
+ if base.Flag.W > 1 {
+ s := fmt.Sprintf("new closure func: %v", fn)
+ Dump(s, fn)
+ }
+
+ if pkg != nil {
+ pkg.Decls = append(pkg.Decls, fn)
+ }
+
+ if false && IsTrivialClosure(clo) {
+ // TODO(mdempsky): Investigate if we can/should optimize this
+ // case. walkClosure already handles it later, but it could be
+ // useful to recognize earlier (e.g., it might allow multiple
+ // inlined calls to a function to share a common trivial closure
+ // func, rather than cloning it for each inlined call).
+ }
+
+ return clo
+}
diff --git a/src/cmd/compile/internal/ir/name.go b/src/cmd/compile/internal/ir/name.go
index b6c68bc5e0..a2eec05013 100644
--- a/src/cmd/compile/internal/ir/name.go
+++ b/src/cmd/compile/internal/ir/name.go
@@ -358,39 +358,74 @@ func (n *Name) Byval() bool {
return n.Canonical().flags&nameByval != 0
}
+// NewClosureVar returns a new closure variable for fn to refer to
+// outer variable n.
+func NewClosureVar(pos src.XPos, fn *Func, n *Name) *Name {
+ c := NewNameAt(pos, n.Sym())
+ c.Curfn = fn
+ c.Class = PAUTOHEAP
+ c.SetIsClosureVar(true)
+ c.Defn = n.Canonical()
+ c.Outer = n
+
+ c.SetType(n.Type())
+ c.SetTypecheck(n.Typecheck())
+
+ fn.ClosureVars = append(fn.ClosureVars, c)
+
+ return c
+}
+
+// NewHiddenParam returns a new hidden parameter for fn with the given
+// name and type.
+func NewHiddenParam(pos src.XPos, fn *Func, sym *types.Sym, typ *types.Type) *Name {
+ if fn.OClosure != nil {
+ base.FatalfAt(fn.Pos(), "cannot add hidden parameters to closures")
+ }
+
+ fn.SetNeedctxt(true)
+
+ // Create a fake parameter, disassociated from any real function, to
+ // pretend to capture.
+ fake := NewNameAt(pos, sym)
+ fake.Class = PPARAM
+ fake.SetType(typ)
+ fake.SetByval(true)
+
+ return NewClosureVar(pos, fn, fake)
+}
+
// CaptureName returns a Name suitable for referring to n from within function
// fn or from the package block if fn is nil. If n is a free variable declared
-// within a function that encloses fn, then CaptureName returns a closure
-// variable that refers to n and adds it to fn.ClosureVars. Otherwise, it simply
-// returns n.
+// within a function that encloses fn, then CaptureName returns the closure
+// variable that refers to n within fn, creating it if necessary.
+// Otherwise, it simply returns n.
func CaptureName(pos src.XPos, fn *Func, n *Name) *Name {
- if n.IsClosureVar() {
- base.FatalfAt(pos, "misuse of CaptureName on closure variable: %v", n)
- }
- if n.Op() != ONAME || n.Curfn == nil || n.Curfn == fn {
+ if n.Op() != ONAME || n.Curfn == nil {
return n // okay to use directly
}
- if fn == nil {
- base.FatalfAt(pos, "package-block reference to %v, declared in %v", n, n.Curfn)
+ if n.IsClosureVar() {
+ base.FatalfAt(pos, "misuse of CaptureName on closure variable: %v", n)
}
c := n.Innermost
- if c != nil && c.Curfn == fn {
+ if c == nil {
+ c = n
+ }
+ if c.Curfn == fn {
return c
}
+ if fn == nil {
+ base.FatalfAt(pos, "package-block reference to %v, declared in %v", n, n.Curfn)
+ }
+
// Do not have a closure var for the active closure yet; make one.
- c = NewNameAt(pos, n.Sym())
- c.Curfn = fn
- c.Class = PAUTOHEAP
- c.SetIsClosureVar(true)
- c.Defn = n
+ c = NewClosureVar(pos, fn, c)
// Link into list of active closure variables.
// Popped from list in FinishCaptureNames.
- c.Outer = n.Innermost
n.Innermost = c
- fn.ClosureVars = append(fn.ClosureVars, c)
return c
}
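
For reference, the source-level situation CaptureName models (illustrative only): every function literal between the use and the declaration gets its own closure variable, chained via Outer back to the original name.

package main

import "fmt"

func main() {
	x := 1 // declared in main

	outer := func() func() int {
		// x is free here, so "outer" gets a closure variable for it...
		return func() int {
			// ...and this inner literal gets another one, whose Outer
			// chain leads back through "outer" to main's x.
			x++
			return x
		}
	}

	inc := outer()
	fmt.Println(inc(), inc(), x) // 2 3 3
}
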
diff --git a/src/cmd/compile/internal/ir/node.go b/src/cmd/compile/internal/ir/node.go
index af559cc082..f071cb78ce 100644
--- a/src/cmd/compile/internal/ir/node.go
+++ b/src/cmd/compile/internal/ir/node.go
@@ -159,7 +159,6 @@ const (
OCALLFUNC // X(Args) (function call f(args))
OCALLMETH // X(Args) (direct method call x.Method(args))
OCALLINTER // X(Args) (interface method call x.Method(args))
- OCALLPART // X.Sel (method expression x.Method, not called)
OCAP // cap(X)
OCLOSE // close(X)
OCLOSURE // func Type { Func.Closure.Body } (func literal)
@@ -171,6 +170,7 @@ const (
OPTRLIT // &X (X is composite literal)
OCONV // Type(X) (type conversion)
OCONVIFACE // Type(X) (type conversion, to interface)
+ OCONVIDATA // Builds a data word to store X in an interface. Equivalent to IDATA(CONVIFACE(X)). Is an ir.ConvExpr.
OCONVNOP // Type(X) (type conversion, no effect)
OCOPY // copy(X, Y)
ODCL // var X (declares X of type X.Type)
@@ -237,6 +237,7 @@ const (
OSLICE3ARR // X[Low : High : Max] (X is pointer to array)
OSLICEHEADER // sliceheader{Ptr, Len, Cap} (Ptr is unsafe.Pointer, Len is length, Cap is capacity)
ORECOVER // recover()
+ ORECOVERFP // recover(Args) w/ explicit FP argument
ORECV // <-X
ORUNESTR // Type(X) (Type is string, X is rune)
OSELRECV2 // like OAS2: Lhs = Rhs where len(Lhs)=2, len(Rhs)=1, Rhs[0].Op = ORECV (appears as .Var of OCASE)
@@ -249,14 +250,16 @@ const (
OSIZEOF // unsafe.Sizeof(X)
OUNSAFEADD // unsafe.Add(X, Y)
OUNSAFESLICE // unsafe.Slice(X, Y)
- OMETHEXPR // method expression
+ OMETHEXPR // X(Args) (method expression T.Method(args), first argument is the method receiver)
+ OMETHVALUE // X.Sel (method value t.Method, not called)
// statements
OBLOCK // { List } (block of code)
OBREAK // break [Label]
// OCASE: case List: Body (List==nil means default)
// For OTYPESW, List is a OTYPE node for the specified type (or OLITERAL
- // for nil), and, if a type-switch variable is specified, Rlist is an
+ // for nil) or an ODYNAMICTYPE indicating a runtime type for generics.
+ // If a type-switch variable is specified, Var is an
// ONAME for the version of the type-switch variable with the specified
// type.
OCASE
@@ -317,9 +320,16 @@ const (
OINLMARK // start of an inlined body, with file/line of caller. Xoffset is an index into the inline tree.
OLINKSYMOFFSET // offset within a name
+ // opcodes for generics
+ ODYNAMICDOTTYPE // x = i.(T) where T is a type parameter (or derived from a type parameter)
+ ODYNAMICDOTTYPE2 // x, ok = i.(T) where T is a type parameter (or derived from a type parameter)
+ ODYNAMICTYPE // a type node for type switches (represents a dynamic target type for a type switch)
+
// arch-specific opcodes
- OTAILCALL // tail call to another function
- OGETG // runtime.getg() (read g pointer)
+ OTAILCALL // tail call to another function
+ OGETG // runtime.getg() (read g pointer)
+ OGETCALLERPC // runtime.getcallerpc() (continuation PC in caller frame)
+ OGETCALLERSP // runtime.getcallersp() (stack pointer in caller frame)
OEND
)
@@ -436,18 +446,19 @@ func (s NameSet) Sorted(less func(*Name, *Name) bool) []*Name {
return res
}
-type PragmaFlag int16
+type PragmaFlag uint16
const (
// Func pragmas.
- Nointerface PragmaFlag = 1 << iota
- Noescape // func parameters don't escape
- Norace // func must not have race detector annotations
- Nosplit // func should not execute on separate stack
- Noinline // func should not be inlined
- NoCheckPtr // func should not be instrumented by checkptr
- CgoUnsafeArgs // treat a pointer to one arg as a pointer to them all
- UintptrEscapes // pointers converted to uintptr escape
+ Nointerface PragmaFlag = 1 << iota
+ Noescape // func parameters don't escape
+ Norace // func must not have race detector annotations
+ Nosplit // func should not execute on separate stack
+ Noinline // func should not be inlined
+ NoCheckPtr // func should not be instrumented by checkptr
+ CgoUnsafeArgs // treat a pointer to one arg as a pointer to them all
+ UintptrKeepAlive // pointers converted to uintptr must be kept alive (compiler internal only)
+ UintptrEscapes // pointers converted to uintptr escape
// Runtime-only func pragmas.
// See ../../../../runtime/README.md for detailed descriptions.
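
These pragma values are single-bit flags, so a function's combined pragmas are stored as a bitwise OR and queried with AND. A minimal sketch of the pattern (constants duplicated here purely for illustration; this is not the compiler's API surface):

package main

import "fmt"

type PragmaFlag uint16

const (
	Nointerface PragmaFlag = 1 << iota
	Noescape
	Norace
	Nosplit
)

func main() {
	flags := Noescape | Nosplit

	fmt.Println(flags&Nosplit != 0) // true
	fmt.Println(flags&Norace != 0)  // false
}
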
@@ -563,7 +574,7 @@ func OuterValue(n Node) Node {
for {
switch nn := n; nn.Op() {
case OXDOT:
- base.Fatalf("OXDOT in walk")
+ base.FatalfAt(n.Pos(), "OXDOT in walk: %v", n)
case ODOT:
nn := nn.(*SelectorExpr)
n = nn.X
diff --git a/src/cmd/compile/internal/ir/node_gen.go b/src/cmd/compile/internal/ir/node_gen.go
index 22855d7163..aa41c03beb 100644
--- a/src/cmd/compile/internal/ir/node_gen.go
+++ b/src/cmd/compile/internal/ir/node_gen.go
@@ -463,6 +463,62 @@ func (n *Decl) editChildren(edit func(Node) Node) {
}
}
+func (n *DynamicType) Format(s fmt.State, verb rune) { fmtNode(n, s, verb) }
+func (n *DynamicType) copy() Node {
+ c := *n
+ c.init = copyNodes(c.init)
+ return &c
+}
+func (n *DynamicType) doChildren(do func(Node) bool) bool {
+ if doNodes(n.init, do) {
+ return true
+ }
+ if n.X != nil && do(n.X) {
+ return true
+ }
+ if n.ITab != nil && do(n.ITab) {
+ return true
+ }
+ return false
+}
+func (n *DynamicType) editChildren(edit func(Node) Node) {
+ editNodes(n.init, edit)
+ if n.X != nil {
+ n.X = edit(n.X).(Node)
+ }
+ if n.ITab != nil {
+ n.ITab = edit(n.ITab).(Node)
+ }
+}
+
+func (n *DynamicTypeAssertExpr) Format(s fmt.State, verb rune) { fmtNode(n, s, verb) }
+func (n *DynamicTypeAssertExpr) copy() Node {
+ c := *n
+ c.init = copyNodes(c.init)
+ return &c
+}
+func (n *DynamicTypeAssertExpr) doChildren(do func(Node) bool) bool {
+ if doNodes(n.init, do) {
+ return true
+ }
+ if n.X != nil && do(n.X) {
+ return true
+ }
+ if n.T != nil && do(n.T) {
+ return true
+ }
+ return false
+}
+func (n *DynamicTypeAssertExpr) editChildren(edit func(Node) Node) {
+ editNodes(n.init, edit)
+ if n.X != nil {
+ n.X = edit(n.X).(Node)
+ }
+ if n.T != nil {
+ n.T = edit(n.T).(Node)
+ }
+}
+
func (n *ForStmt) Format(s fmt.State, verb rune) { fmtNode(n, s, verb) }
func (n *ForStmt) copy() Node {
c := *n
@@ -947,6 +1003,22 @@ func (n *RangeStmt) editChildren(edit func(Node) Node) {
}
}
+func (n *RawOrigExpr) Format(s fmt.State, verb rune) { fmtNode(n, s, verb) }
+func (n *RawOrigExpr) copy() Node {
+ c := *n
+ c.init = copyNodes(c.init)
+ return &c
+}
+func (n *RawOrigExpr) doChildren(do func(Node) bool) bool {
+ if doNodes(n.init, do) {
+ return true
+ }
+ return false
+}
+func (n *RawOrigExpr) editChildren(edit func(Node) Node) {
+ editNodes(n.init, edit)
+}
+
func (n *ResultExpr) Format(s fmt.State, verb rune) { fmtNode(n, s, verb) }
func (n *ResultExpr) copy() Node {
c := *n
diff --git a/src/cmd/compile/internal/ir/op_string.go b/src/cmd/compile/internal/ir/op_string.go
index 405a0c6b3c..b8cee71818 100644
--- a/src/cmd/compile/internal/ir/op_string.go
+++ b/src/cmd/compile/internal/ir/op_string.go
@@ -41,18 +41,18 @@ func _() {
_ = x[OCALLFUNC-30]
_ = x[OCALLMETH-31]
_ = x[OCALLINTER-32]
- _ = x[OCALLPART-33]
- _ = x[OCAP-34]
- _ = x[OCLOSE-35]
- _ = x[OCLOSURE-36]
- _ = x[OCOMPLIT-37]
- _ = x[OMAPLIT-38]
- _ = x[OSTRUCTLIT-39]
- _ = x[OARRAYLIT-40]
- _ = x[OSLICELIT-41]
- _ = x[OPTRLIT-42]
- _ = x[OCONV-43]
- _ = x[OCONVIFACE-44]
+ _ = x[OCAP-33]
+ _ = x[OCLOSE-34]
+ _ = x[OCLOSURE-35]
+ _ = x[OCOMPLIT-36]
+ _ = x[OMAPLIT-37]
+ _ = x[OSTRUCTLIT-38]
+ _ = x[OARRAYLIT-39]
+ _ = x[OSLICELIT-40]
+ _ = x[OPTRLIT-41]
+ _ = x[OCONV-42]
+ _ = x[OCONVIFACE-43]
+ _ = x[OCONVIDATA-44]
_ = x[OCONVNOP-45]
_ = x[OCOPY-46]
_ = x[ODCL-47]
@@ -109,65 +109,72 @@ func _() {
_ = x[OSLICE3ARR-98]
_ = x[OSLICEHEADER-99]
_ = x[ORECOVER-100]
- _ = x[ORECV-101]
- _ = x[ORUNESTR-102]
- _ = x[OSELRECV2-103]
- _ = x[OIOTA-104]
- _ = x[OREAL-105]
- _ = x[OIMAG-106]
- _ = x[OCOMPLEX-107]
- _ = x[OALIGNOF-108]
- _ = x[OOFFSETOF-109]
- _ = x[OSIZEOF-110]
- _ = x[OUNSAFEADD-111]
- _ = x[OUNSAFESLICE-112]
- _ = x[OMETHEXPR-113]
- _ = x[OBLOCK-114]
- _ = x[OBREAK-115]
- _ = x[OCASE-116]
- _ = x[OCONTINUE-117]
- _ = x[ODEFER-118]
- _ = x[OFALL-119]
- _ = x[OFOR-120]
- _ = x[OFORUNTIL-121]
- _ = x[OGOTO-122]
- _ = x[OIF-123]
- _ = x[OLABEL-124]
- _ = x[OGO-125]
- _ = x[ORANGE-126]
- _ = x[ORETURN-127]
- _ = x[OSELECT-128]
- _ = x[OSWITCH-129]
- _ = x[OTYPESW-130]
- _ = x[OFUNCINST-131]
- _ = x[OTCHAN-132]
- _ = x[OTMAP-133]
- _ = x[OTSTRUCT-134]
- _ = x[OTINTER-135]
- _ = x[OTFUNC-136]
- _ = x[OTARRAY-137]
- _ = x[OTSLICE-138]
- _ = x[OINLCALL-139]
- _ = x[OEFACE-140]
- _ = x[OITAB-141]
- _ = x[OIDATA-142]
- _ = x[OSPTR-143]
- _ = x[OCFUNC-144]
- _ = x[OCHECKNIL-145]
- _ = x[OVARDEF-146]
- _ = x[OVARKILL-147]
- _ = x[OVARLIVE-148]
- _ = x[ORESULT-149]
- _ = x[OINLMARK-150]
- _ = x[OLINKSYMOFFSET-151]
- _ = x[OTAILCALL-152]
- _ = x[OGETG-153]
- _ = x[OEND-154]
+ _ = x[ORECOVERFP-101]
+ _ = x[ORECV-102]
+ _ = x[ORUNESTR-103]
+ _ = x[OSELRECV2-104]
+ _ = x[OIOTA-105]
+ _ = x[OREAL-106]
+ _ = x[OIMAG-107]
+ _ = x[OCOMPLEX-108]
+ _ = x[OALIGNOF-109]
+ _ = x[OOFFSETOF-110]
+ _ = x[OSIZEOF-111]
+ _ = x[OUNSAFEADD-112]
+ _ = x[OUNSAFESLICE-113]
+ _ = x[OMETHEXPR-114]
+ _ = x[OMETHVALUE-115]
+ _ = x[OBLOCK-116]
+ _ = x[OBREAK-117]
+ _ = x[OCASE-118]
+ _ = x[OCONTINUE-119]
+ _ = x[ODEFER-120]
+ _ = x[OFALL-121]
+ _ = x[OFOR-122]
+ _ = x[OFORUNTIL-123]
+ _ = x[OGOTO-124]
+ _ = x[OIF-125]
+ _ = x[OLABEL-126]
+ _ = x[OGO-127]
+ _ = x[ORANGE-128]
+ _ = x[ORETURN-129]
+ _ = x[OSELECT-130]
+ _ = x[OSWITCH-131]
+ _ = x[OTYPESW-132]
+ _ = x[OFUNCINST-133]
+ _ = x[OTCHAN-134]
+ _ = x[OTMAP-135]
+ _ = x[OTSTRUCT-136]
+ _ = x[OTINTER-137]
+ _ = x[OTFUNC-138]
+ _ = x[OTARRAY-139]
+ _ = x[OTSLICE-140]
+ _ = x[OINLCALL-141]
+ _ = x[OEFACE-142]
+ _ = x[OITAB-143]
+ _ = x[OIDATA-144]
+ _ = x[OSPTR-145]
+ _ = x[OCFUNC-146]
+ _ = x[OCHECKNIL-147]
+ _ = x[OVARDEF-148]
+ _ = x[OVARKILL-149]
+ _ = x[OVARLIVE-150]
+ _ = x[ORESULT-151]
+ _ = x[OINLMARK-152]
+ _ = x[OLINKSYMOFFSET-153]
+ _ = x[ODYNAMICDOTTYPE-154]
+ _ = x[ODYNAMICDOTTYPE2-155]
+ _ = x[ODYNAMICTYPE-156]
+ _ = x[OTAILCALL-157]
+ _ = x[OGETG-158]
+ _ = x[OGETCALLERPC-159]
+ _ = x[OGETCALLERSP-160]
+ _ = x[OEND-161]
}
-const _Op_name = "XXXNAMENONAMETYPEPACKLITERALNILADDSUBORXORADDSTRADDRANDANDAPPENDBYTES2STRBYTES2STRTMPRUNES2STRSTR2BYTESSTR2BYTESTMPSTR2RUNESSLICE2ARRPTRASAS2AS2DOTTYPEAS2FUNCAS2MAPRAS2RECVASOPCALLCALLFUNCCALLMETHCALLINTERCALLPARTCAPCLOSECLOSURECOMPLITMAPLITSTRUCTLITARRAYLITSLICELITPTRLITCONVCONVIFACECONVNOPCOPYDCLDCLFUNCDCLCONSTDCLTYPEDELETEDOTDOTPTRDOTMETHDOTINTERXDOTDOTTYPEDOTTYPE2EQNELTLEGEGTDEREFINDEXINDEXMAPKEYSTRUCTKEYLENMAKEMAKECHANMAKEMAPMAKESLICEMAKESLICECOPYMULDIVMODLSHRSHANDANDNOTNEWNOTBITNOTPLUSNEGORORPANICPRINTPRINTNPARENSENDSLICESLICEARRSLICESTRSLICE3SLICE3ARRSLICEHEADERRECOVERRECVRUNESTRSELRECV2IOTAREALIMAGCOMPLEXALIGNOFOFFSETOFSIZEOFUNSAFEADDUNSAFESLICEMETHEXPRBLOCKBREAKCASECONTINUEDEFERFALLFORFORUNTILGOTOIFLABELGORANGERETURNSELECTSWITCHTYPESWFUNCINSTTCHANTMAPTSTRUCTTINTERTFUNCTARRAYTSLICEINLCALLEFACEITABIDATASPTRCFUNCCHECKNILVARDEFVARKILLVARLIVERESULTINLMARKLINKSYMOFFSETTAILCALLGETGEND"
+const _Op_name = "XXXNAMENONAMETYPEPACKLITERALNILADDSUBORXORADDSTRADDRANDANDAPPENDBYTES2STRBYTES2STRTMPRUNES2STRSTR2BYTESSTR2BYTESTMPSTR2RUNESSLICE2ARRPTRASAS2AS2DOTTYPEAS2FUNCAS2MAPRAS2RECVASOPCALLCALLFUNCCALLMETHCALLINTERCAPCLOSECLOSURECOMPLITMAPLITSTRUCTLITARRAYLITSLICELITPTRLITCONVCONVIFACECONVIDATACONVNOPCOPYDCLDCLFUNCDCLCONSTDCLTYPEDELETEDOTDOTPTRDOTMETHDOTINTERXDOTDOTTYPEDOTTYPE2EQNELTLEGEGTDEREFINDEXINDEXMAPKEYSTRUCTKEYLENMAKEMAKECHANMAKEMAPMAKESLICEMAKESLICECOPYMULDIVMODLSHRSHANDANDNOTNEWNOTBITNOTPLUSNEGORORPANICPRINTPRINTNPARENSENDSLICESLICEARRSLICESTRSLICE3SLICE3ARRSLICEHEADERRECOVERRECOVERFPRECVRUNESTRSELRECV2IOTAREALIMAGCOMPLEXALIGNOFOFFSETOFSIZEOFUNSAFEADDUNSAFESLICEMETHEXPRMETHVALUEBLOCKBREAKCASECONTINUEDEFERFALLFORFORUNTILGOTOIFLABELGORANGERETURNSELECTSWITCHTYPESWFUNCINSTTCHANTMAPTSTRUCTTINTERTFUNCTARRAYTSLICEINLCALLEFACEITABIDATASPTRCFUNCCHECKNILVARDEFVARKILLVARLIVERESULTINLMARKLINKSYMOFFSETDYNAMICDOTTYPEDYNAMICDOTTYPE2DYNAMICTYPETAILCALLGETGGETCALLERPCGETCALLERSPEND"
-var _Op_index = [...]uint16{0, 3, 7, 13, 17, 21, 28, 31, 34, 37, 39, 42, 48, 52, 58, 64, 73, 85, 94, 103, 115, 124, 136, 138, 141, 151, 158, 165, 172, 176, 180, 188, 196, 205, 213, 216, 221, 228, 235, 241, 250, 258, 266, 272, 276, 285, 292, 296, 299, 306, 314, 321, 327, 330, 336, 343, 351, 355, 362, 370, 372, 374, 376, 378, 380, 382, 387, 392, 400, 403, 412, 415, 419, 427, 434, 443, 456, 459, 462, 465, 468, 471, 474, 480, 483, 486, 492, 496, 499, 503, 508, 513, 519, 524, 528, 533, 541, 549, 555, 564, 575, 582, 586, 593, 601, 605, 609, 613, 620, 627, 635, 641, 650, 661, 669, 674, 679, 683, 691, 696, 700, 703, 711, 715, 717, 722, 724, 729, 735, 741, 747, 753, 761, 766, 770, 777, 783, 788, 794, 800, 807, 812, 816, 821, 825, 830, 838, 844, 851, 858, 864, 871, 884, 892, 896, 899}
+var _Op_index = [...]uint16{0, 3, 7, 13, 17, 21, 28, 31, 34, 37, 39, 42, 48, 52, 58, 64, 73, 85, 94, 103, 115, 124, 136, 138, 141, 151, 158, 165, 172, 176, 180, 188, 196, 205, 208, 213, 220, 227, 233, 242, 250, 258, 264, 268, 277, 286, 293, 297, 300, 307, 315, 322, 328, 331, 337, 344, 352, 356, 363, 371, 373, 375, 377, 379, 381, 383, 388, 393, 401, 404, 413, 416, 420, 428, 435, 444, 457, 460, 463, 466, 469, 472, 475, 481, 484, 487, 493, 497, 500, 504, 509, 514, 520, 525, 529, 534, 542, 550, 556, 565, 576, 583, 592, 596, 603, 611, 615, 619, 623, 630, 637, 645, 651, 660, 671, 679, 688, 693, 698, 702, 710, 715, 719, 722, 730, 734, 736, 741, 743, 748, 754, 760, 766, 772, 780, 785, 789, 796, 802, 807, 813, 819, 826, 831, 835, 840, 844, 849, 857, 863, 870, 877, 883, 890, 903, 917, 932, 943, 951, 955, 966, 977, 980}
func (i Op) String() string {
if i >= Op(len(_Op_index)-1) {
diff --git a/src/cmd/compile/internal/ir/package.go b/src/cmd/compile/internal/ir/package.go
index e4b93d113e..3896e2b91b 100644
--- a/src/cmd/compile/internal/ir/package.go
+++ b/src/cmd/compile/internal/ir/package.go
@@ -32,7 +32,4 @@ type Package struct {
// Exported (or re-exported) symbols.
Exports []*Name
-
- // Map from function names of stencils to already-created stencils.
- Stencils map[*types.Sym]*Func
}
diff --git a/src/cmd/compile/internal/ir/scc.go b/src/cmd/compile/internal/ir/scc.go
index 83c6074170..2cfceaa1f6 100644
--- a/src/cmd/compile/internal/ir/scc.go
+++ b/src/cmd/compile/internal/ir/scc.go
@@ -90,7 +90,7 @@ func (v *bottomUpVisitor) visit(n *Func) uint32 {
if n := n.(*Name); n.Class == PFUNC {
do(n.Defn)
}
- case ODOTMETH, OCALLPART, OMETHEXPR:
+ case ODOTMETH, OMETHVALUE, OMETHEXPR:
if fn := MethodExprName(n); fn != nil {
do(fn.Defn)
}
diff --git a/src/cmd/compile/internal/ir/stmt.go b/src/cmd/compile/internal/ir/stmt.go
index 8115012f97..69a74b9fdd 100644
--- a/src/cmd/compile/internal/ir/stmt.go
+++ b/src/cmd/compile/internal/ir/stmt.go
@@ -244,7 +244,7 @@ func NewGoDeferStmt(pos src.XPos, op Op, call Node) *GoDeferStmt {
return n
}
-// A IfStmt is a return statement: if Init; Cond { Then } else { Else }.
+// An IfStmt is an if statement: if Init; Cond { Body } else { Else }.
type IfStmt struct {
miniStmt
Cond Node
diff --git a/src/cmd/compile/internal/ir/type.go b/src/cmd/compile/internal/ir/type.go
index a903ea8cd4..63dd673dcd 100644
--- a/src/cmd/compile/internal/ir/type.go
+++ b/src/cmd/compile/internal/ir/type.go
@@ -300,11 +300,36 @@ func (n *typeNode) CanBeNtype() {}
// TypeNode returns the Node representing the type t.
func TypeNode(t *types.Type) Ntype {
+ return TypeNodeAt(src.NoXPos, t)
+}
+
+// TypeNodeAt is like TypeNode, but allows specifying the position
+// information if a new OTYPE needs to be constructed.
+//
+// Deprecated: Use TypeNode instead. For typical use, the position for
+// an anonymous OTYPE node should not matter. However, TypeNodeAt is
+// available for use with toolstash -cmp to refactor existing code
+// that is sensitive to OTYPE position.
+func TypeNodeAt(pos src.XPos, t *types.Type) Ntype {
if n := t.Obj(); n != nil {
if n.Type() != t {
base.Fatalf("type skew: %v has type %v, but expected %v", n, n.Type(), t)
}
return n.(Ntype)
}
- return newTypeNode(src.NoXPos, t)
+ return newTypeNode(pos, t)
+}
+
+// A DynamicType represents the target type in a type switch.
+type DynamicType struct {
+ miniExpr
+ X Node // a *runtime._type for the targeted type
+ ITab Node // for type switches from nonempty interfaces to non-interfaces, this is the itab for that pair.
+}
+
+func NewDynamicType(pos src.XPos, x Node) *DynamicType {
+ n := &DynamicType{X: x}
+ n.pos = pos
+ n.op = ODYNAMICTYPE
+ return n
}
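
A source-level sketch of the type switches these nodes describe (illustrative only, not part of this patch): when a case mentions a type parameter, the case's target type is only known at instantiation time, which is what an ODYNAMICTYPE case represents.

package main

import "fmt"

func describe[T any](i interface{}) string {
	switch i.(type) {
	case T: // target type depends on the instantiation
		return "the type argument"
	case string:
		return "string"
	default:
		return "something else"
	}
}

func main() {
	fmt.Println(describe[int](42))   // the type argument
	fmt.Println(describe[int]("hi")) // string
	fmt.Println(describe[int](3.5))  // something else
}
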
diff --git a/src/cmd/compile/internal/ir/val.go b/src/cmd/compile/internal/ir/val.go
index 03c320e205..bfe7d2bb43 100644
--- a/src/cmd/compile/internal/ir/val.go
+++ b/src/cmd/compile/internal/ir/val.go
@@ -66,7 +66,7 @@ func Float64Val(v constant.Value) float64 {
func AssertValidTypeForConst(t *types.Type, v constant.Value) {
if !ValidTypeForConst(t, v) {
- base.Fatalf("%v does not represent %v", t, v)
+ base.Fatalf("%v (%v) does not represent %v (%v)", t, t.Kind(), v, v.Kind())
}
}
diff --git a/src/cmd/compile/internal/liveness/plive.go b/src/cmd/compile/internal/liveness/plive.go
index f5c2ef7709..2705eac4f7 100644
--- a/src/cmd/compile/internal/liveness/plive.go
+++ b/src/cmd/compile/internal/liveness/plive.go
@@ -1082,6 +1082,10 @@ func (lv *liveness) showlive(v *ssa.Value, live bitvec.BitVec) {
if base.Flag.Live == 0 || ir.FuncName(lv.fn) == "init" || strings.HasPrefix(ir.FuncName(lv.fn), ".") {
return
}
+ if lv.fn.Wrapper() || lv.fn.Dupok() {
+ // Skip reporting liveness information for compiler-generated wrappers.
+ return
+ }
if !(v == nil || v.Op.IsCall()) {
// Historically we only printed this information at
// calls. Keep doing so.
diff --git a/src/cmd/compile/internal/logopt/logopt_test.go b/src/cmd/compile/internal/logopt/logopt_test.go
index 71976174b0..902cbc8091 100644
--- a/src/cmd/compile/internal/logopt/logopt_test.go
+++ b/src/cmd/compile/internal/logopt/logopt_test.go
@@ -209,7 +209,7 @@ func s15a8(x *[15]int64) [15]int64 {
want(t, slogged, `{"range":{"start":{"line":11,"character":6},"end":{"line":11,"character":6}},"severity":3,"code":"isInBounds","source":"go compiler","message":""}`)
want(t, slogged, `{"range":{"start":{"line":7,"character":6},"end":{"line":7,"character":6}},"severity":3,"code":"canInlineFunction","source":"go compiler","message":"cost: 35"}`)
// escape analysis explanation
- want(t, slogged, `{"range":{"start":{"line":7,"character":13},"end":{"line":7,"character":13}},"severity":3,"code":"leak","source":"go compiler","message":"parameter z leaks to ~r2 with derefs=0",`+
+ want(t, slogged, `{"range":{"start":{"line":7,"character":13},"end":{"line":7,"character":13}},"severity":3,"code":"leak","source":"go compiler","message":"parameter z leaks to ~r0 with derefs=0",`+
`"relatedInformation":[`+
`{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":13},"end":{"line":9,"character":13}}},"message":"escflow: flow: y = z:"},`+
`{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":13},"end":{"line":9,"character":13}}},"message":"escflow: from y := z (assign-pair)"},`+
@@ -220,8 +220,8 @@ func s15a8(x *[15]int64) [15]int64 {
`{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":13},"end":{"line":9,"character":13}}},"message":"escflow: from \u0026y.b (address-of)"},`+
`{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":4,"character":9},"end":{"line":4,"character":9}}},"message":"inlineLoc"},`+
`{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":13},"end":{"line":9,"character":13}}},"message":"escflow: from ~R0 = \u0026y.b (assign-pair)"},`+
- `{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":3},"end":{"line":9,"character":3}}},"message":"escflow: flow: ~r2 = ~R0:"},`+
- `{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":3},"end":{"line":9,"character":3}}},"message":"escflow: from return (*int)(~R0) (return)"}]}`)
+ `{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":3},"end":{"line":9,"character":3}}},"message":"escflow: flow: ~r0 = ~R0:"},`+
+ `{"location":{"uri":"file://tmpdir/file.go","range":{"start":{"line":9,"character":3},"end":{"line":9,"character":3}}},"message":"escflow: from return ~R0 (return)"}]}`)
})
}
diff --git a/src/cmd/compile/internal/mips/galign.go b/src/cmd/compile/internal/mips/galign.go
index f892923ba0..4e6897042e 100644
--- a/src/cmd/compile/internal/mips/galign.go
+++ b/src/cmd/compile/internal/mips/galign.go
@@ -21,7 +21,6 @@ func Init(arch *ssagen.ArchInfo) {
arch.SoftFloat = (buildcfg.GOMIPS == "softfloat")
arch.ZeroRange = zerorange
arch.Ginsnop = ginsnop
- arch.Ginsnopdefer = ginsnop
arch.SSAMarkMoves = func(s *ssagen.State, b *ssa.Block) {}
arch.SSAGenValue = ssaGenValue
arch.SSAGenBlock = ssaGenBlock
diff --git a/src/cmd/compile/internal/mips64/galign.go b/src/cmd/compile/internal/mips64/galign.go
index af81366e51..412bc71aab 100644
--- a/src/cmd/compile/internal/mips64/galign.go
+++ b/src/cmd/compile/internal/mips64/galign.go
@@ -21,7 +21,6 @@ func Init(arch *ssagen.ArchInfo) {
arch.SoftFloat = buildcfg.GOMIPS64 == "softfloat"
arch.ZeroRange = zerorange
arch.Ginsnop = ginsnop
- arch.Ginsnopdefer = ginsnop
arch.SSAMarkMoves = func(s *ssagen.State, b *ssa.Block) {}
arch.SSAGenValue = ssaGenValue
diff --git a/src/cmd/compile/internal/noder/codes.go b/src/cmd/compile/internal/noder/codes.go
new file mode 100644
index 0000000000..f8cb7729ac
--- /dev/null
+++ b/src/cmd/compile/internal/noder/codes.go
@@ -0,0 +1,124 @@
+// UNREVIEWED
+
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+type code interface {
+ marker() syncMarker
+ value() int
+}
+
+type codeVal int
+
+func (c codeVal) marker() syncMarker { return syncVal }
+func (c codeVal) value() int { return int(c) }
+
+const (
+ valBool codeVal = iota
+ valString
+ valInt64
+ valBigInt
+ valBigRat
+ valBigFloat
+)
+
+type codeType int
+
+func (c codeType) marker() syncMarker { return syncType }
+func (c codeType) value() int { return int(c) }
+
+const (
+ typeBasic codeType = iota
+ typeNamed
+ typePointer
+ typeSlice
+ typeArray
+ typeChan
+ typeMap
+ typeSignature
+ typeStruct
+ typeInterface
+ typeUnion
+ typeTypeParam
+)
+
+type codeObj int
+
+func (c codeObj) marker() syncMarker { return syncCodeObj }
+func (c codeObj) value() int { return int(c) }
+
+const (
+ objAlias codeObj = iota
+ objConst
+ objType
+ objFunc
+ objVar
+ objStub
+)
+
+type codeStmt int
+
+func (c codeStmt) marker() syncMarker { return syncStmt1 }
+func (c codeStmt) value() int { return int(c) }
+
+const (
+ stmtEnd codeStmt = iota
+ stmtLabel
+ stmtBlock
+ stmtExpr
+ stmtSend
+ stmtAssign
+ stmtAssignOp
+ stmtIncDec
+ stmtBranch
+ stmtCall
+ stmtReturn
+ stmtIf
+ stmtFor
+ stmtSwitch
+ stmtSelect
+
+ // TODO(mdempsky): Remove after we don't care about toolstash -cmp.
+ stmtTypeDeclHack
+)
+
+type codeExpr int
+
+func (c codeExpr) marker() syncMarker { return syncExpr }
+func (c codeExpr) value() int { return int(c) }
+
+// TODO(mdempsky): Split expr into addr, for lvalues.
+const (
+ exprNone codeExpr = iota
+ exprConst
+ exprType // type expression
+ exprLocal // local variable
+ exprName // global variable or function
+ exprBlank
+ exprCompLit
+ exprFuncLit
+ exprSelector
+ exprIndex
+ exprSlice
+ exprAssert
+ exprUnaryOp
+ exprBinaryOp
+ exprCall
+ exprConvert
+)
+
+type codeDecl int
+
+func (c codeDecl) marker() syncMarker { return syncDecl }
+func (c codeDecl) value() int { return int(c) }
+
+const (
+ declEnd codeDecl = iota
+ declFunc
+ declMethod
+ declVar
+ declOther
+)
diff --git a/src/cmd/compile/internal/noder/decl.go b/src/cmd/compile/internal/noder/decl.go
index 4ca2eb4740..b23dd47600 100644
--- a/src/cmd/compile/internal/noder/decl.go
+++ b/src/cmd/compile/internal/noder/decl.go
@@ -18,43 +18,48 @@ import (
// TODO(mdempsky): Skip blank declarations? Probably only safe
// for declarations without pragmas.
-func (g *irgen) decls(decls []syntax.Decl) []ir.Node {
- var res ir.Nodes
+func (g *irgen) decls(res *ir.Nodes, decls []syntax.Decl) {
for _, decl := range decls {
switch decl := decl.(type) {
case *syntax.ConstDecl:
- g.constDecl(&res, decl)
+ g.constDecl(res, decl)
case *syntax.FuncDecl:
- g.funcDecl(&res, decl)
+ g.funcDecl(res, decl)
case *syntax.TypeDecl:
if ir.CurFunc == nil {
continue // already handled in irgen.generate
}
- g.typeDecl(&res, decl)
+ g.typeDecl(res, decl)
case *syntax.VarDecl:
- g.varDecl(&res, decl)
+ g.varDecl(res, decl)
default:
g.unhandled("declaration", decl)
}
}
- return res
}
func (g *irgen) importDecl(p *noder, decl *syntax.ImportDecl) {
- // TODO(mdempsky): Merge with gcimports so we don't have to import
- // packages twice.
-
g.pragmaFlags(decl.Pragma, 0)
- ipkg := importfile(decl)
- if ipkg == ir.Pkgs.Unsafe {
+ // Get the imported package's path, as resolved already by types2
+ // and gcimporter. This is the same path as would be computed by
+ // parseImportPath.
+ switch pkgNameOf(g.info, decl).Imported().Path() {
+ case "unsafe":
p.importedUnsafe = true
- }
- if ipkg.Path == "embed" {
+ case "embed":
p.importedEmbed = true
}
}
+// pkgNameOf returns the PkgName associated with the given ImportDecl.
+func pkgNameOf(info *types2.Info, decl *syntax.ImportDecl) *types2.PkgName {
+ if name := decl.LocalPkgName; name != nil {
+ return info.Defs[name].(*types2.PkgName)
+ }
+ return info.Implicits[decl].(*types2.PkgName)
+}
+
func (g *irgen) constDecl(out *ir.Nodes, decl *syntax.ConstDecl) {
g.pragmaFlags(decl.Pragma, 0)
@@ -90,27 +95,54 @@ func (g *irgen) funcDecl(out *ir.Nodes, decl *syntax.FuncDecl) {
if fn.Pragma&ir.Systemstack != 0 && fn.Pragma&ir.Nosplit != 0 {
base.ErrorfAt(fn.Pos(), "go:nosplit and go:systemstack cannot be combined")
}
+ if fn.Pragma&ir.Nointerface != 0 {
+ // Propagate //go:nointerface from Func.Pragma to Field.Nointerface.
+ // This is a bit roundabout, but this is the earliest point where we've
+ // processed the function's pragma flags, and we've also already created
+ // the Fields to represent the receiver's method set.
+ if recv := fn.Type().Recv(); recv != nil {
+ typ := types.ReceiverBaseType(recv.Type)
+ if typ.OrigSym != nil {
+ // For a generic method, we mark the methods on the
+ // base generic type, since those are the methods
+ // that will be stenciled.
+ typ = typ.OrigSym.Def.Type()
+ }
+ meth := typecheck.Lookdot1(fn, typecheck.Lookup(decl.Name.Value), typ, typ.Methods(), 0)
+ meth.SetNointerface(true)
+ }
+ }
if decl.Name.Value == "init" && decl.Recv == nil {
g.target.Inits = append(g.target.Inits, fn)
}
- g.funcBody(fn, decl.Recv, decl.Type, decl.Body)
+ g.later(func() {
+ if fn.Type().HasTParam() {
+ g.topFuncIsGeneric = true
+ }
+ g.funcBody(fn, decl.Recv, decl.Type, decl.Body)
+ g.topFuncIsGeneric = false
+ if fn.Type().HasTParam() && fn.Body != nil {
+ // Set pointers to the dcls/body of a generic function/method in
+ // the Inl struct, so it is marked for export, is available for
+ // stenciling, and works with Inline_Flood().
+ fn.Inl = &ir.Inline{
+ Cost: 1,
+ Dcl: fn.Dcl,
+ Body: fn.Body,
+ }
+ }
- out.Append(fn)
+ out.Append(fn)
+ })
}
func (g *irgen) typeDecl(out *ir.Nodes, decl *syntax.TypeDecl) {
if decl.Alias {
name, _ := g.def(decl.Name)
g.pragmaFlags(decl.Pragma, 0)
-
- // TODO(mdempsky): This matches how typecheckdef marks aliases for
- // export, but this won't generalize to exporting function-scoped
- // type aliases. We should maybe just use n.Alias() instead.
- if ir.CurFunc == nil {
- name.Sym().Def = ir.TypeNode(name.Type())
- }
+ assert(name.Alias()) // should be set by irgen.obj
out.Append(ir.NewDecl(g.pos(decl), ir.ODCLTYPE, name))
return
@@ -154,11 +186,15 @@ func (g *irgen) typeDecl(out *ir.Nodes, decl *syntax.TypeDecl) {
// [mdempsky: Subtleties like these are why I always vehemently
// object to new type pragmas.]
ntyp.SetUnderlying(g.typeExpr(decl.Type))
- if len(decl.TParamList) > 0 {
- // Set HasTParam if there are any tparams, even if no tparams are
- // used in the type itself (e.g., if it is an empty struct, or no
- // fields in the struct use the tparam).
- ntyp.SetHasTParam(true)
+
+ tparams := otyp.(*types2.Named).TParams()
+ if n := tparams.Len(); n > 0 {
+ rparams := make([]*types.Type, n)
+ for i := range rparams {
+ rparams[i] = g.typ(tparams.At(i))
+ }
+ // This will set hasTParam flag if any rparams are not concrete types.
+ ntyp.SetRParams(rparams)
}
types.ResumeCheckSize()
@@ -182,7 +218,6 @@ func (g *irgen) varDecl(out *ir.Nodes, decl *syntax.VarDecl) {
for i, name := range decl.NameList {
names[i], _ = g.def(name)
}
- values := g.exprList(decl.Values)
if decl.Pragma != nil {
pragma := decl.Pragma.(*pragmas)
@@ -191,44 +226,57 @@ func (g *irgen) varDecl(out *ir.Nodes, decl *syntax.VarDecl) {
g.reportUnused(pragma)
}
- var as2 *ir.AssignListStmt
- if len(values) != 0 && len(names) != len(values) {
- as2 = ir.NewAssignListStmt(pos, ir.OAS2, make([]ir.Node, len(names)), values)
- }
+ do := func() {
+ values := g.exprList(decl.Values)
- for i, name := range names {
- if ir.CurFunc != nil {
- out.Append(ir.NewDecl(pos, ir.ODCL, name))
+ var as2 *ir.AssignListStmt
+ if len(values) != 0 && len(names) != len(values) {
+ as2 = ir.NewAssignListStmt(pos, ir.OAS2, make([]ir.Node, len(names)), values)
}
- if as2 != nil {
- as2.Lhs[i] = name
- name.Defn = as2
- } else {
- as := ir.NewAssignStmt(pos, name, nil)
- if len(values) != 0 {
- as.Y = values[i]
- name.Defn = as
- } else if ir.CurFunc == nil {
- name.Defn = as
- }
- lhs := []ir.Node{as.X}
- rhs := []ir.Node{}
- if as.Y != nil {
- rhs = []ir.Node{as.Y}
+
+ for i, name := range names {
+ if ir.CurFunc != nil {
+ out.Append(ir.NewDecl(pos, ir.ODCL, name))
}
- transformAssign(as, lhs, rhs)
- as.X = lhs[0]
- if as.Y != nil {
- as.Y = rhs[0]
+ if as2 != nil {
+ as2.Lhs[i] = name
+ name.Defn = as2
+ } else {
+ as := ir.NewAssignStmt(pos, name, nil)
+ if len(values) != 0 {
+ as.Y = values[i]
+ name.Defn = as
+ } else if ir.CurFunc == nil {
+ name.Defn = as
+ }
+ lhs := []ir.Node{as.X}
+ rhs := []ir.Node{}
+ if as.Y != nil {
+ rhs = []ir.Node{as.Y}
+ }
+ transformAssign(as, lhs, rhs)
+ as.X = lhs[0]
+ if as.Y != nil {
+ as.Y = rhs[0]
+ }
+ as.SetTypecheck(1)
+ out.Append(as)
}
- as.SetTypecheck(1)
- out.Append(as)
+ }
+ if as2 != nil {
+ transformAssign(as2, as2.Lhs, as2.Rhs)
+ as2.SetTypecheck(1)
+ out.Append(as2)
}
}
- if as2 != nil {
- transformAssign(as2, as2.Lhs, as2.Rhs)
- as2.SetTypecheck(1)
- out.Append(as2)
+
+ // If we're within a function, we need to process the assignment
+ // part of the variable declaration right away. Otherwise, we leave
+ // it to be handled after all top-level declarations are processed.
+ if ir.CurFunc != nil {
+ do()
+ } else {
+ g.later(do)
}
}
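
A standalone sketch of the "run immediately inside a function body, otherwise queue until all top-level declarations have been seen" pattern used above (types and names invented):

package main

import "fmt"

type gen struct {
	inFunc bool
	todo   []func()
}

// nowOrLater runs do immediately for function-scoped work and
// defers package-scoped work until flush is called.
func (g *gen) nowOrLater(do func()) {
	if g.inFunc {
		do()
		return
	}
	g.todo = append(g.todo, do)
}

func (g *gen) flush() {
	for _, do := range g.todo {
		do()
	}
	g.todo = nil
}

func main() {
	g := new(gen)

	g.nowOrLater(func() { fmt.Println("package-level var: deferred") })

	g.inFunc = true
	g.nowOrLater(func() { fmt.Println("local var: immediate") })
	g.inFunc = false

	g.flush() // package-level var: deferred
}
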
diff --git a/src/cmd/compile/internal/noder/decoder.go b/src/cmd/compile/internal/noder/decoder.go
new file mode 100644
index 0000000000..3dc61c6a69
--- /dev/null
+++ b/src/cmd/compile/internal/noder/decoder.go
@@ -0,0 +1,301 @@
+// UNREVIEWED
+
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+import (
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "go/token"
+ "math/big"
+ "os"
+ "runtime"
+ "strings"
+
+ "cmd/compile/internal/base"
+)
+
+type pkgDecoder struct {
+ pkgPath string
+
+ elemEndsEnds [numRelocs]uint32
+ elemEnds []uint32
+ elemData string
+}
+
+func newPkgDecoder(pkgPath, input string) pkgDecoder {
+ pr := pkgDecoder{
+ pkgPath: pkgPath,
+ }
+
+ // TODO(mdempsky): Implement direct indexing of input string to
+ // avoid copying the position information.
+
+ r := strings.NewReader(input)
+
+ assert(binary.Read(r, binary.LittleEndian, pr.elemEndsEnds[:]) == nil)
+
+ pr.elemEnds = make([]uint32, pr.elemEndsEnds[len(pr.elemEndsEnds)-1])
+ assert(binary.Read(r, binary.LittleEndian, pr.elemEnds[:]) == nil)
+
+ pos, err := r.Seek(0, os.SEEK_CUR)
+ assert(err == nil)
+
+ pr.elemData = input[pos:]
+ assert(len(pr.elemData) == int(pr.elemEnds[len(pr.elemEnds)-1]))
+
+ return pr
+}
+
+func (pr *pkgDecoder) numElems(k reloc) int {
+ count := int(pr.elemEndsEnds[k])
+ if k > 0 {
+ count -= int(pr.elemEndsEnds[k-1])
+ }
+ return count
+}
+
+func (pr *pkgDecoder) totalElems() int {
+ return len(pr.elemEnds)
+}
+
+func (pr *pkgDecoder) absIdx(k reloc, idx int) int {
+ absIdx := idx
+ if k > 0 {
+ absIdx += int(pr.elemEndsEnds[k-1])
+ }
+ if absIdx >= int(pr.elemEndsEnds[k]) {
+ base.Fatalf("%v:%v is out of bounds; %v", k, idx, pr.elemEndsEnds)
+ }
+ return absIdx
+}
+
+func (pr *pkgDecoder) dataIdx(k reloc, idx int) string {
+ absIdx := pr.absIdx(k, idx)
+
+ var start uint32
+ if absIdx > 0 {
+ start = pr.elemEnds[absIdx-1]
+ }
+ end := pr.elemEnds[absIdx]
+
+ return pr.elemData[start:end]
+}
+
+func (pr *pkgDecoder) stringIdx(idx int) string {
+ return pr.dataIdx(relocString, idx)
+}
+
+func (pr *pkgDecoder) newDecoder(k reloc, idx int, marker syncMarker) decoder {
+ r := pr.newDecoderRaw(k, idx)
+ r.sync(marker)
+ return r
+}
+
+func (pr *pkgDecoder) newDecoderRaw(k reloc, idx int) decoder {
+ r := decoder{
+ common: pr,
+ k: k,
+ idx: idx,
+ }
+
+ // TODO(mdempsky) r.data.Reset(...) after #44505 is resolved.
+ r.data = *strings.NewReader(pr.dataIdx(k, idx))
+
+ r.sync(syncRelocs)
+ r.relocs = make([]relocEnt, r.len())
+ for i := range r.relocs {
+ r.sync(syncReloc)
+ r.relocs[i] = relocEnt{reloc(r.len()), r.len()}
+ }
+
+ return r
+}
+
+type decoder struct {
+ common *pkgDecoder
+
+ relocs []relocEnt
+ data strings.Reader
+
+ k reloc
+ idx int
+}
+
+func (r *decoder) checkErr(err error) {
+ if err != nil {
+ base.Fatalf("unexpected error: %v", err)
+ }
+}
+
+func (r *decoder) rawUvarint() uint64 {
+ x, err := binary.ReadUvarint(&r.data)
+ r.checkErr(err)
+ return x
+}
+
+func (r *decoder) rawVarint() int64 {
+ ux := r.rawUvarint()
+
+ // Zig-zag decode.
+ x := int64(ux >> 1)
+ if ux&1 != 0 {
+ x = ^x
+ }
+ return x
+}
+
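
rawVarint undoes the standard zig-zag encoding, which maps signed values of small magnitude to small unsigned varints (0→0, -1→1, 1→2, -2→3, ...). A self-contained round trip of the same scheme, kept separate from the patch above:

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// zigzag encodes a signed value so that values near zero stay small.
func zigzag(x int64) uint64 { return uint64(x<<1) ^ uint64(x>>63) }

// unzigzag mirrors decoder.rawVarint above.
func unzigzag(ux uint64) int64 {
	x := int64(ux >> 1)
	if ux&1 != 0 {
		x = ^x
	}
	return x
}

func main() {
	var buf bytes.Buffer
	tmp := make([]byte, binary.MaxVarintLen64)
	for _, v := range []int64{0, -1, 1, -2, 1 << 40} {
		n := binary.PutUvarint(tmp, zigzag(v))
		buf.Write(tmp[:n])
	}

	r := bytes.NewReader(buf.Bytes())
	for {
		ux, err := binary.ReadUvarint(r)
		if err != nil {
			break
		}
		fmt.Println(unzigzag(ux)) // 0 -1 1 -2 1099511627776
	}
}
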
+func (r *decoder) rawReloc(k reloc, idx int) int {
+ e := r.relocs[idx]
+ assert(e.kind == k)
+ return e.idx
+}
+
+func (r *decoder) sync(mWant syncMarker) {
+ if !enableSync {
+ return
+ }
+
+ pos, _ := r.data.Seek(0, os.SEEK_CUR) // TODO(mdempsky): io.SeekCurrent after #44505 is resolved
+ mHave := syncMarker(r.rawUvarint())
+ writerPCs := make([]int, r.rawUvarint())
+ for i := range writerPCs {
+ writerPCs[i] = int(r.rawUvarint())
+ }
+
+ if mHave == mWant {
+ return
+ }
+
+ // There's some tension here between printing:
+ //
+ // (1) full file paths that tools can recognize (e.g., so emacs
+ // hyperlinks the "file:line" text for easy navigation), or
+ //
+ // (2) short file paths that are easier for humans to read (e.g., by
+ // omitting redundant or irrelevant details, so it's easier to
+ // focus on the useful bits that remain).
+ //
+ // The current formatting favors the former, as it seems more
+ // helpful in practice. But perhaps the formatting could be improved
+ // to better address both concerns. For example, use relative file
+ // paths if they would be shorter, or rewrite file paths to contain
+ // "$GOROOT" (like objabi.AbsFile does) if tools can be taught how
+ // to reliably expand that again.
+
+ fmt.Printf("export data desync: package %q, section %v, index %v, offset %v\n", r.common.pkgPath, r.k, r.idx, pos)
+
+ fmt.Printf("\nfound %v, written at:\n", mHave)
+ if len(writerPCs) == 0 {
+ fmt.Printf("\t[stack trace unavailable; recompile package %q with -d=syncframes]\n", r.common.pkgPath)
+ }
+ for _, pc := range writerPCs {
+ fmt.Printf("\t%s\n", r.common.stringIdx(r.rawReloc(relocString, pc)))
+ }
+
+ fmt.Printf("\nexpected %v, reading at:\n", mWant)
+ var readerPCs [32]uintptr // TODO(mdempsky): Dynamically size?
+ n := runtime.Callers(2, readerPCs[:])
+ for _, pc := range fmtFrames(readerPCs[:n]...) {
+ fmt.Printf("\t%s\n", pc)
+ }
+
+ // We already printed a stack trace for the reader, so now we can
+ // simply exit. Printing a second one with panic or base.Fatalf
+ // would just be noise.
+ os.Exit(1)
+}
+
+func (r *decoder) bool() bool {
+ r.sync(syncBool)
+ x, err := r.data.ReadByte()
+ r.checkErr(err)
+ assert(x < 2)
+ return x != 0
+}
+
+func (r *decoder) int64() int64 {
+ r.sync(syncInt64)
+ return r.rawVarint()
+}
+
+func (r *decoder) uint64() uint64 {
+ r.sync(syncUint64)
+ return r.rawUvarint()
+}
+
+func (r *decoder) len() int { x := r.uint64(); v := int(x); assert(uint64(v) == x); return v }
+func (r *decoder) int() int { x := r.int64(); v := int(x); assert(int64(v) == x); return v }
+func (r *decoder) uint() uint { x := r.uint64(); v := uint(x); assert(uint64(v) == x); return v }
+
+func (r *decoder) code(mark syncMarker) int {
+ r.sync(mark)
+ return r.len()
+}
+
+func (r *decoder) reloc(k reloc) int {
+ r.sync(syncUseReloc)
+ return r.rawReloc(k, r.len())
+}
+
+func (r *decoder) string() string {
+ r.sync(syncString)
+ return r.common.stringIdx(r.reloc(relocString))
+}
+
+func (r *decoder) strings() []string {
+ res := make([]string, r.len())
+ for i := range res {
+ res[i] = r.string()
+ }
+ return res
+}
+
+func (r *decoder) rawValue() constant.Value {
+ isComplex := r.bool()
+ val := r.scalar()
+ if isComplex {
+ val = constant.BinaryOp(val, token.ADD, constant.MakeImag(r.scalar()))
+ }
+ return val
+}
+
+func (r *decoder) scalar() constant.Value {
+ switch tag := codeVal(r.code(syncVal)); tag {
+ default:
+ panic(fmt.Sprintf("unexpected scalar tag: %v", tag))
+
+ case valBool:
+ return constant.MakeBool(r.bool())
+ case valString:
+ return constant.MakeString(r.string())
+ case valInt64:
+ return constant.MakeInt64(r.int64())
+ case valBigInt:
+ return constant.Make(r.bigInt())
+ case valBigRat:
+ num := r.bigInt()
+ denom := r.bigInt()
+ return constant.Make(new(big.Rat).SetFrac(num, denom))
+ case valBigFloat:
+ return constant.Make(r.bigFloat())
+ }
+}
+
+func (r *decoder) bigInt() *big.Int {
+ v := new(big.Int).SetBytes([]byte(r.string()))
+ if r.bool() {
+ v.Neg(v)
+ }
+ return v
+}
+
+func (r *decoder) bigFloat() *big.Float {
+ v := new(big.Float).SetPrec(512)
+ assert(v.UnmarshalText([]byte(r.string())) == nil)
+ return v
+}
diff --git a/src/cmd/compile/internal/noder/encoder.go b/src/cmd/compile/internal/noder/encoder.go
new file mode 100644
index 0000000000..d8ab0f6255
--- /dev/null
+++ b/src/cmd/compile/internal/noder/encoder.go
@@ -0,0 +1,284 @@
+// UNREVIEWED
+
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+import (
+ "bytes"
+ "encoding/binary"
+ "fmt"
+ "go/constant"
+ "io"
+ "math/big"
+ "runtime"
+
+ "cmd/compile/internal/base"
+)
+
+type pkgEncoder struct {
+ elems [numRelocs][]string
+
+ stringsIdx map[string]int
+}
+
+func newPkgEncoder() pkgEncoder {
+ return pkgEncoder{
+ stringsIdx: make(map[string]int),
+ }
+}
+
+func (pw *pkgEncoder) dump(out io.Writer) {
+ writeUint32 := func(x uint32) {
+ assert(binary.Write(out, binary.LittleEndian, x) == nil)
+ }
+
+ var sum uint32
+ for _, elems := range &pw.elems {
+ sum += uint32(len(elems))
+ writeUint32(sum)
+ }
+
+ sum = 0
+ for _, elems := range &pw.elems {
+ for _, elem := range elems {
+ sum += uint32(len(elem))
+ writeUint32(sum)
+ }
+ }
+
+ for _, elems := range &pw.elems {
+ for _, elem := range elems {
+ _, err := io.WriteString(out, elem)
+ assert(err == nil)
+ }
+ }
+}
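The layout dump produces is: one running element count per section, then one running end offset per element, then the raw element payloads concatenated. A self-contained sketch with made-up section contents (the real sections are the reloc kinds, not shown here):

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

func main() {
	sections := [][]string{{"ab", "c"}, {"defg"}}

	var out bytes.Buffer
	w32 := func(x uint32) { _ = binary.Write(&out, binary.LittleEndian, x) }

	var sum uint32
	for _, elems := range sections { // cumulative element counts per section
		sum += uint32(len(elems))
		w32(sum)
	}

	sum = 0
	for _, elems := range sections { // cumulative end offset of each element
		for _, elem := range elems {
			sum += uint32(len(elem))
			w32(sum)
		}
	}

	for _, elems := range sections { // element payloads, concatenated
		for _, elem := range elems {
			out.WriteString(elem)
		}
	}

	// counts 2,3; offsets 2,3,7; then "abcdefg"
	fmt.Printf("% x\n", out.Bytes())
}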
+
+func (pw *pkgEncoder) stringIdx(s string) int {
+ if idx, ok := pw.stringsIdx[s]; ok {
+ assert(pw.elems[relocString][idx] == s)
+ return idx
+ }
+
+ idx := len(pw.elems[relocString])
+ pw.elems[relocString] = append(pw.elems[relocString], s)
+ pw.stringsIdx[s] = idx
+ return idx
+}
+
+func (pw *pkgEncoder) newEncoder(k reloc, marker syncMarker) encoder {
+ e := pw.newEncoderRaw(k)
+ e.sync(marker)
+ return e
+}
+
+func (pw *pkgEncoder) newEncoderRaw(k reloc) encoder {
+ idx := len(pw.elems[k])
+ pw.elems[k] = append(pw.elems[k], "") // placeholder
+
+ return encoder{
+ p: pw,
+ k: k,
+ idx: idx,
+ }
+}
+
+// Encoders
+
+type encoder struct {
+ p *pkgEncoder
+
+ relocs []relocEnt
+ data bytes.Buffer
+
+ encodingRelocHeader bool
+
+ k reloc
+ idx int
+}
+
+func (w *encoder) flush() int {
+ var sb bytes.Buffer // TODO(mdempsky): strings.Builder after #44505 is resolved
+
+ // Back up the data so that the relocations can be written at the front.
+ var tmp bytes.Buffer
+ io.Copy(&tmp, &w.data)
+
+ // TODO(mdempsky): Consider writing these out separately so they're
+ // easier to strip, along with function bodies, so that we can prune
+ // down to just the data that's relevant to go/types.
+ if w.encodingRelocHeader {
+ base.Fatalf("encodingRelocHeader already true; recursive flush?")
+ }
+ w.encodingRelocHeader = true
+ w.sync(syncRelocs)
+ w.len(len(w.relocs))
+ for _, rent := range w.relocs {
+ w.sync(syncReloc)
+ w.len(int(rent.kind))
+ w.len(rent.idx)
+ }
+
+ io.Copy(&sb, &w.data)
+ io.Copy(&sb, &tmp)
+ w.p.elems[w.k][w.idx] = sb.String()
+
+ return w.idx
+}
+
+func (w *encoder) checkErr(err error) {
+ if err != nil {
+ base.Fatalf("unexpected error: %v", err)
+ }
+}
+
+func (w *encoder) rawUvarint(x uint64) {
+ var buf [binary.MaxVarintLen64]byte
+ n := binary.PutUvarint(buf[:], x)
+ _, err := w.data.Write(buf[:n])
+ w.checkErr(err)
+}
+
+func (w *encoder) rawVarint(x int64) {
+ // Zig-zag encode.
+ ux := uint64(x) << 1
+ if x < 0 {
+ ux = ^ux
+ }
+
+ w.rawUvarint(ux)
+}
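The zig-zag step above maps signed values onto unsigned ones so that small magnitudes stay small as uvarints (0→0, -1→1, 1→2, -2→3, …). A standalone sketch of the mapping and its inverse; the decoder's read side isn't shown in this diff, so the inverse here is the standard zig-zag decode, assumed to match:

package main

import "fmt"

// zigzag mirrors encoder.rawVarint's mapping.
func zigzag(x int64) uint64 {
	ux := uint64(x) << 1
	if x < 0 {
		ux = ^ux
	}
	return ux
}

// unzigzag is the standard inverse (assumed; the decoder is not in this hunk).
func unzigzag(ux uint64) int64 {
	x := int64(ux >> 1)
	if ux&1 != 0 {
		x = ^x
	}
	return x
}

func main() {
	for _, x := range []int64{0, -1, 1, -2, 2, 63, -64} {
		u := zigzag(x)
		fmt.Printf("%4d -> %3d -> %4d\n", x, u, unzigzag(u))
	}
}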
+
+func (w *encoder) rawReloc(r reloc, idx int) int {
+ // TODO(mdempsky): Use map for lookup.
+ for i, rent := range w.relocs {
+ if rent.kind == r && rent.idx == idx {
+ return i
+ }
+ }
+
+ i := len(w.relocs)
+ w.relocs = append(w.relocs, relocEnt{r, idx})
+ return i
+}
+
+func (w *encoder) sync(m syncMarker) {
+ if !enableSync {
+ return
+ }
+
+ // Writing out stack frame string references requires working
+ // relocations, but writing out the relocations themselves involves
+ // sync markers. To prevent infinite recursion, we simply trim the
+ // stack frame for sync markers within the relocation header.
+ var frames []string
+ if !w.encodingRelocHeader && base.Debug.SyncFrames > 0 {
+ pcs := make([]uintptr, base.Debug.SyncFrames)
+ n := runtime.Callers(2, pcs)
+ frames = fmtFrames(pcs[:n]...)
+ }
+
+ // TODO(mdempsky): Save space by writing out stack frames as a
+ // linked list so we can share common stack frames.
+ w.rawUvarint(uint64(m))
+ w.rawUvarint(uint64(len(frames)))
+ for _, frame := range frames {
+ w.rawUvarint(uint64(w.rawReloc(relocString, w.p.stringIdx(frame))))
+ }
+}
+
+func (w *encoder) bool(b bool) bool {
+ w.sync(syncBool)
+ var x byte
+ if b {
+ x = 1
+ }
+ err := w.data.WriteByte(x)
+ w.checkErr(err)
+ return b
+}
+
+func (w *encoder) int64(x int64) {
+ w.sync(syncInt64)
+ w.rawVarint(x)
+}
+
+func (w *encoder) uint64(x uint64) {
+ w.sync(syncUint64)
+ w.rawUvarint(x)
+}
+
+func (w *encoder) len(x int) { assert(x >= 0); w.uint64(uint64(x)) }
+func (w *encoder) int(x int) { w.int64(int64(x)) }
+func (w *encoder) uint(x uint) { w.uint64(uint64(x)) }
+
+func (w *encoder) reloc(r reloc, idx int) {
+ w.sync(syncUseReloc)
+ w.len(w.rawReloc(r, idx))
+}
+
+func (w *encoder) code(c code) {
+ w.sync(c.marker())
+ w.len(c.value())
+}
+
+func (w *encoder) string(s string) {
+ w.sync(syncString)
+ w.reloc(relocString, w.p.stringIdx(s))
+}
+
+func (w *encoder) strings(ss []string) {
+ w.len(len(ss))
+ for _, s := range ss {
+ w.string(s)
+ }
+}
+
+func (w *encoder) rawValue(val constant.Value) {
+ if w.bool(val.Kind() == constant.Complex) {
+ w.scalar(constant.Real(val))
+ w.scalar(constant.Imag(val))
+ } else {
+ w.scalar(val)
+ }
+}
+
+func (w *encoder) scalar(val constant.Value) {
+ switch v := constant.Val(val).(type) {
+ default:
+ panic(fmt.Sprintf("unhandled %v (%v)", val, val.Kind()))
+ case bool:
+ w.code(valBool)
+ w.bool(v)
+ case string:
+ w.code(valString)
+ w.string(v)
+ case int64:
+ w.code(valInt64)
+ w.int64(v)
+ case *big.Int:
+ w.code(valBigInt)
+ w.bigInt(v)
+ case *big.Rat:
+ w.code(valBigRat)
+ w.bigInt(v.Num())
+ w.bigInt(v.Denom())
+ case *big.Float:
+ w.code(valBigFloat)
+ w.bigFloat(v)
+ }
+}
+
+func (w *encoder) bigInt(v *big.Int) {
+ b := v.Bytes()
+ w.string(string(b)) // TODO: More efficient encoding.
+ w.bool(v.Sign() < 0)
+}
+
+func (w *encoder) bigFloat(v *big.Float) {
+ b := v.Append(nil, 'p', -1)
+ w.string(string(b)) // TODO: More efficient encoding.
+}
diff --git a/src/cmd/compile/internal/noder/export.go b/src/cmd/compile/internal/noder/export.go
new file mode 100644
index 0000000000..1a296e22c8
--- /dev/null
+++ b/src/cmd/compile/internal/noder/export.go
@@ -0,0 +1,65 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/typecheck"
+ "cmd/internal/bio"
+)
+
+// writeNewExportFunc is a hook that can be added to append extra
+// export data after the normal export data section. It allows
+// experimenting with new export data format designs without requiring
+// immediate support in the go/internal or x/tools importers.
+var writeNewExportFunc func(out io.Writer)
+
+func WriteExports(out *bio.Writer) {
+ // When unified IR export data is enabled, we simply append it to
+ // the end of the normal export data (with compiler extensions
+ // disabled), and write an extra header giving its size.
+ //
+ // If the compiler sees this header, it knows to read the new data
+ // instead; meanwhile the go/types importers will silently ignore it
+ // and continue processing the old export instead.
+ //
+ // This allows us to experiment with changes to the new export data
+ // format without needing to update the go/internal/gcimporter or
+ // (worse) x/tools/go/gcexportdata.
+
+ useNewExport := writeNewExportFunc != nil
+
+ var old, new bytes.Buffer
+
+ typecheck.WriteExports(&old, !useNewExport)
+
+ if useNewExport {
+ writeNewExportFunc(&new)
+ }
+
+ oldLen := old.Len()
+ newLen := new.Len()
+
+ if useNewExport {
+ fmt.Fprintf(out, "\nnewexportsize %v\n", newLen)
+ }
+
+ // The linker also looks for the $$ marker - use char after $$ to distinguish format.
+ out.WriteString("\n$$B\n") // indicate binary export format
+ io.Copy(out, &old)
+ out.WriteString("\n$$\n")
+ io.Copy(out, &new)
+
+ if base.Debug.Export != 0 {
+ fmt.Printf("BenchmarkExportSize:%s 1 %d bytes\n", base.Ctxt.Pkgpath, oldLen)
+ if useNewExport {
+ fmt.Printf("BenchmarkNewExportSize:%s 1 %d bytes\n", base.Ctxt.Pkgpath, newLen)
+ }
+ }
+}
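For orientation, the stream WriteExports builds is: an optional "newexportsize" header line, the "$$B" marker, the legacy indexed export data, the closing "$$" marker, and then the unified payload appended after it. A rough sketch of just that layout (object-file header lines written elsewhere are omitted, and writeLayout is an illustrative name, not compiler API):

package main

import (
	"bytes"
	"fmt"
	"io"
)

// writeLayout sketches the marker layout produced above.
func writeLayout(out io.Writer, old, new []byte) {
	if len(new) > 0 {
		fmt.Fprintf(out, "\nnewexportsize %v\n", len(new))
	}
	io.WriteString(out, "\n$$B\n") // binary export format marker
	out.Write(old)                 // legacy indexed export data
	io.WriteString(out, "\n$$\n")  // end-of-export marker the linker looks for
	out.Write(new)                 // unified IR payload, if any
}

func main() {
	var buf bytes.Buffer
	writeLayout(&buf, []byte("<indexed export>"), []byte("<unified export>"))
	fmt.Printf("%q\n", buf.String())
}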
diff --git a/src/cmd/compile/internal/noder/expr.go b/src/cmd/compile/internal/noder/expr.go
index c7695ed920..58637dca39 100644
--- a/src/cmd/compile/internal/noder/expr.go
+++ b/src/cmd/compile/internal/noder/expr.go
@@ -5,6 +5,8 @@
package noder
import (
+ "fmt"
+
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
@@ -15,6 +17,8 @@ import (
)
func (g *irgen) expr(expr syntax.Expr) ir.Node {
+ expr = unparen(expr) // skip parens; unneeded after parse+typecheck
+
if expr == nil {
return nil
}
@@ -46,6 +50,8 @@ func (g *irgen) expr(expr syntax.Expr) ir.Node {
base.FatalfAt(g.pos(expr), "unrecognized type-checker result")
}
+ base.Assert(g.exprStmtOK)
+
// The gc backend expects all expressions to have a concrete type, and
// types2 mostly satisfies this expectation already. But there are a few
// cases where the Go spec doesn't require converting to concrete type,
@@ -67,14 +73,16 @@ func (g *irgen) expr(expr syntax.Expr) ir.Node {
// Constant expression.
if tv.Value != nil {
- return Const(g.pos(expr), g.typ(typ), tv.Value)
+ typ := g.typ(typ)
+ value := FixValue(typ, tv.Value)
+ return OrigConst(g.pos(expr), typ, value, constExprOp(expr), syntax.String(expr))
}
n := g.expr0(typ, expr)
if n.Typecheck() != 1 && n.Typecheck() != 3 {
base.FatalfAt(g.pos(expr), "missed typecheck: %+v", n)
}
- if !g.match(n.Type(), typ, tv.HasOk()) {
+ if n.Op() != ir.OFUNCINST && !g.match(n.Type(), typ, tv.HasOk()) {
base.FatalfAt(g.pos(expr), "expected %L to have type %v", n, typ)
}
return n
@@ -82,6 +90,11 @@ func (g *irgen) expr(expr syntax.Expr) ir.Node {
func (g *irgen) expr0(typ types2.Type, expr syntax.Expr) ir.Node {
pos := g.pos(expr)
+ assert(pos.IsKnown())
+
+ // Set base.Pos for transformation code that still uses base.Pos, rather than
+ // the pos of the node being converted.
+ base.Pos = pos
switch expr := expr.(type) {
case *syntax.Name:
@@ -105,23 +118,30 @@ func (g *irgen) expr0(typ types2.Type, expr syntax.Expr) ir.Node {
// The key for the Inferred map is the CallExpr (if inferring
// types required the function arguments) or the IndexExpr below
// (if types could be inferred without the function arguments).
- if inferred, ok := g.info.Inferred[expr]; ok && len(inferred.Targs) > 0 {
+ if inferred, ok := g.info.Inferred[expr]; ok && inferred.TArgs.Len() > 0 {
// This is the case where inferring types required the
// types of the function arguments.
- targs := make([]ir.Node, len(inferred.Targs))
- for i, targ := range inferred.Targs {
- targs[i] = ir.TypeNode(g.typ(targ))
+ targs := make([]ir.Node, inferred.TArgs.Len())
+ for i := range targs {
+ targs[i] = ir.TypeNode(g.typ(inferred.TArgs.At(i)))
}
if fun.Op() == ir.OFUNCINST {
// Replace explicit type args with the full list that
- // includes the additional inferred type args
+ // includes the additional inferred type args.
+ // Substitute the type args for the type params in
+ // the generic function's type.
fun.(*ir.InstExpr).Targs = targs
+ newt := g.substType(fun.Type(), fun.Type().TParams(), targs)
+ typed(newt, fun)
} else {
- // Create a function instantiation here, given
- // there are only inferred type args (e.g.
- // min(5,6), where min is a generic function)
+ // Create a function instantiation here, given there
+ // are only inferred type args (e.g. min(5,6), where
+ // min is a generic function). Substitute the type
+ // args for the type params in the generic function's
+ // type.
inst := ir.NewInstExpr(pos, ir.OFUNCINST, fun, targs)
- typed(fun.Type(), inst)
+ newt := g.substType(fun.Type(), fun.Type().TParams(), targs)
+ typed(newt, inst)
fun = inst
}
@@ -131,13 +151,13 @@ func (g *irgen) expr0(typ types2.Type, expr syntax.Expr) ir.Node {
case *syntax.IndexExpr:
var targs []ir.Node
- if inferred, ok := g.info.Inferred[expr]; ok && len(inferred.Targs) > 0 {
+ if inferred, ok := g.info.Inferred[expr]; ok && inferred.TArgs.Len() > 0 {
// This is the partial type inference case where the types
// can be inferred from other type arguments without using
// the types of the function arguments.
- targs = make([]ir.Node, len(inferred.Targs))
- for i, targ := range inferred.Targs {
- targs[i] = ir.TypeNode(g.typ(targ))
+ targs = make([]ir.Node, inferred.TArgs.Len())
+ for i := range targs {
+ targs[i] = ir.TypeNode(g.typ(inferred.TArgs.At(i)))
}
} else if _, ok := expr.Index.(*syntax.ListExpr); ok {
targs = g.exprList(expr.Index)
@@ -158,12 +178,16 @@ func (g *irgen) expr0(typ types2.Type, expr syntax.Expr) ir.Node {
panic("Incorrect argument for generic func instantiation")
}
n := ir.NewInstExpr(pos, ir.OFUNCINST, x, targs)
- typed(g.typ(typ), n)
+ newt := g.typ(typ)
+ // Substitute the type args for the type params in the uninstantiated
+ // function's type. If there aren't enough type args, then the rest
+ // will be inferred at the call node, so don't try the substitution yet.
+ if x.Type().TParams().NumFields() == len(targs) {
+ newt = g.substType(g.typ(typ), x.Type().TParams(), targs)
+ }
+ typed(newt, n)
return n
- case *syntax.ParenExpr:
- return g.expr(expr.X) // skip parens; unneeded after parse+typecheck
-
case *syntax.SelectorExpr:
// Qualified identifier.
if name, ok := expr.X.(*syntax.Name); ok {
@@ -193,7 +217,29 @@ func (g *irgen) expr0(typ types2.Type, expr syntax.Expr) ir.Node {
}
}
-// selectorExpr resolves the choice of ODOT, ODOTPTR, OCALLPART (eventually
+// substType does a normal type substitution, but tparams is in the form of a field
+// list, and targs is in terms of a slice of type nodes. substType records any newly
+// instantiated types into g.instTypeList.
+func (g *irgen) substType(typ *types.Type, tparams *types.Type, targs []ir.Node) *types.Type {
+ fields := tparams.FieldSlice()
+ tparams1 := make([]*types.Type, len(fields))
+ for i, f := range fields {
+ tparams1[i] = f.Type
+ }
+ targs1 := make([]*types.Type, len(targs))
+ for i, n := range targs {
+ targs1[i] = n.Type()
+ }
+ ts := typecheck.Tsubster{
+ Tparams: tparams1,
+ Targs: targs1,
+ }
+ newt := ts.Typ(typ)
+ g.instTypeList = append(g.instTypeList, ts.InstTypeList...)
+ return newt
+}
+
+// selectorExpr resolves the choice of ODOT, ODOTPTR, OMETHVALUE (eventually
// ODOTMETH & ODOTINTER), and OMETHEXPR and deals with embedded fields here rather
// than in typecheck.go.
func (g *irgen) selectorExpr(pos src.XPos, typ types2.Type, expr *syntax.SelectorExpr) ir.Node {
@@ -203,6 +249,44 @@ func (g *irgen) selectorExpr(pos src.XPos, typ types2.Type, expr *syntax.Selecto
// only be fully transformed once it has an instantiated type.
n := ir.NewSelectorExpr(pos, ir.OXDOT, x, typecheck.Lookup(expr.Sel.Value))
typed(g.typ(typ), n)
+
+ // Fill in n.Selection for a generic method reference or a bound
+ // interface method, even though we won't use it directly, since it
+ // is useful for analysis. Specifically, do not fill it in for fields or
+ // other interface methods (a method call on an interface value), so
+ // n.Selection being non-nil means a method reference for a generic
+ // type or a method reference due to a bound.
+ obj2 := g.info.Selections[expr].Obj()
+ sig := types2.AsSignature(obj2.Type())
+ if sig == nil || sig.Recv() == nil {
+ return n
+ }
+ index := g.info.Selections[expr].Index()
+ last := index[len(index)-1]
+ // recvType is the receiver of the method being called. Because of the
+ // way methods are imported, g.obj(obj2) doesn't work across
+ // packages, so we have to lookup the method via the receiver type.
+ recvType := deref2(sig.Recv().Type())
+ if types2.AsInterface(recvType.Underlying()) != nil {
+ fieldType := n.X.Type()
+ for _, ix := range index[:len(index)-1] {
+ fieldType = fieldType.Field(ix).Type
+ }
+ if fieldType.Kind() == types.TTYPEPARAM {
+ n.Selection = fieldType.Bound().AllMethods().Index(last)
+ //fmt.Printf(">>>>> %v: Bound call %v\n", base.FmtPos(pos), n.Sel)
+ } else {
+ assert(fieldType.Kind() == types.TINTER)
+ //fmt.Printf(">>>>> %v: Interface call %v\n", base.FmtPos(pos), n.Sel)
+ }
+ return n
+ }
+
+ recvObj := types2.AsNamed(recvType).Obj()
+ recv := g.pkg(recvObj.Pkg()).Lookup(recvObj.Name()).Def
+ n.Selection = recv.Type().Methods().Index(last)
+ //fmt.Printf(">>>>> %v: Method call %v\n", base.FmtPos(pos), n.Sel)
+
return n
}
@@ -259,14 +343,18 @@ func (g *irgen) selectorExpr(pos src.XPos, typ types2.Type, expr *syntax.Selecto
if wantPtr {
recvType2Base = types2.AsPointer(recvType2).Elem()
}
- if len(types2.AsNamed(recvType2Base).TParams()) > 0 {
+ if types2.AsNamed(recvType2Base).TParams().Len() > 0 {
// recvType2 is the original generic type that is
// instantiated for this method call.
// selinfo.Recv() is the instantiated type
recvType2 = recvType2Base
- // method is the generic method associated with the gen type
- method := g.obj(types2.AsNamed(recvType2).Method(last))
- n = ir.NewSelectorExpr(pos, ir.OCALLPART, x, method.Sym())
+ recvTypeSym := g.pkg(method2.Pkg()).Lookup(recvType2.(*types2.Named).Obj().Name())
+ recvType := recvTypeSym.Def.(*ir.Name).Type()
+ // method is the generic method associated with
+ // the base generic type. The instantiated type may not
+ // have method bodies filled in, if it was imported.
+ method := recvType.Methods().Index(last).Nname.(*ir.Name)
+ n = ir.NewSelectorExpr(pos, ir.OMETHVALUE, x, typecheck.Lookup(expr.Sel.Value))
n.(*ir.SelectorExpr).Selection = types.NewField(pos, method.Sym(), method.Type())
n.(*ir.SelectorExpr).Selection.Nname = method
typed(method.Type(), n)
@@ -274,9 +362,9 @@ func (g *irgen) selectorExpr(pos src.XPos, typ types2.Type, expr *syntax.Selecto
// selinfo.Targs() are the types used to
// instantiate the type of receiver
targs2 := getTargs(selinfo)
- targs := make([]ir.Node, len(targs2))
- for i, targ2 := range targs2 {
- targs[i] = ir.TypeNode(g.typ(targ2))
+ targs := make([]ir.Node, targs2.Len())
+ for i := range targs {
+ targs[i] = ir.TypeNode(g.typ(targs2.At(i)))
}
// Create function instantiation with the type
@@ -300,11 +388,8 @@ func (g *irgen) selectorExpr(pos src.XPos, typ types2.Type, expr *syntax.Selecto
}
// getTargs gets the targs associated with the receiver of a selected method
-func getTargs(selinfo *types2.Selection) []types2.Type {
- r := selinfo.Recv()
- if p := types2.AsPointer(r); p != nil {
- r = p.Elem()
- }
+func getTargs(selinfo *types2.Selection) *types2.TypeList {
+ r := deref2(selinfo.Recv())
n := types2.AsNamed(r)
if n == nil {
base.Fatalf("Incorrect type for selinfo %v", selinfo)
@@ -313,13 +398,17 @@ func getTargs(selinfo *types2.Selection) []types2.Type {
}
func (g *irgen) exprList(expr syntax.Expr) []ir.Node {
+ return g.exprs(unpackListExpr(expr))
+}
+
+func unpackListExpr(expr syntax.Expr) []syntax.Expr {
switch expr := expr.(type) {
case nil:
return nil
case *syntax.ListExpr:
- return g.exprs(expr.ElemList)
+ return expr.ElemList
default:
- return []ir.Node{g.expr(expr)}
+ return []syntax.Expr{expr}
}
}
@@ -344,11 +433,13 @@ func (g *irgen) compLit(typ types2.Type, lit *syntax.CompositeLit) ir.Node {
for i, elem := range lit.ElemList {
switch elem := elem.(type) {
case *syntax.KeyValueExpr:
+ var key ir.Node
if isStruct {
- exprs[i] = ir.NewStructKeyExpr(g.pos(elem), g.name(elem.Key.(*syntax.Name)), g.expr(elem.Value))
+ key = ir.NewIdent(g.pos(elem.Key), g.name(elem.Key.(*syntax.Name)))
} else {
- exprs[i] = ir.NewKeyExpr(g.pos(elem), g.expr(elem.Key), g.expr(elem.Value))
+ key = g.expr(elem.Key)
}
+ exprs[i] = ir.NewKeyExpr(g.pos(elem), key, g.expr(elem.Value))
default:
exprs[i] = g.expr(elem)
}
@@ -360,19 +451,13 @@ func (g *irgen) compLit(typ types2.Type, lit *syntax.CompositeLit) ir.Node {
}
func (g *irgen) funcLit(typ2 types2.Type, expr *syntax.FuncLit) ir.Node {
- fn := ir.NewFunc(g.pos(expr))
- fn.SetIsHiddenClosure(ir.CurFunc != nil)
+ fn := ir.NewClosureFunc(g.pos(expr), ir.CurFunc != nil)
+ ir.NameClosure(fn.OClosure, ir.CurFunc)
- fn.Nname = ir.NewNameAt(g.pos(expr), typecheck.ClosureName(ir.CurFunc))
- ir.MarkFunc(fn.Nname)
typ := g.typ(typ2)
- fn.Nname.Func = fn
- fn.Nname.Defn = fn
typed(typ, fn.Nname)
- fn.SetTypecheck(1)
-
- fn.OClosure = ir.NewClosureExpr(g.pos(expr), fn)
typed(typ, fn.OClosure)
+ fn.SetTypecheck(1)
g.funcBody(fn, nil, expr.Type, expr.Body)
@@ -386,9 +471,14 @@ func (g *irgen) funcLit(typ2 types2.Type, expr *syntax.FuncLit) ir.Node {
cv.SetWalkdef(1)
}
- g.target.Decls = append(g.target.Decls, fn)
-
- return fn.OClosure
+ if g.topFuncIsGeneric {
+ // Don't add any closure inside a generic function/method to the
+ // g.target.Decls list, even though it may not be generic itself.
+ // See issue #47514.
+ return ir.UseClosure(fn.OClosure, nil)
+ } else {
+ return ir.UseClosure(fn.OClosure, g.target)
+ }
}
func (g *irgen) typeExpr(typ syntax.Expr) *types.Type {
@@ -398,3 +488,35 @@ func (g *irgen) typeExpr(typ syntax.Expr) *types.Type {
}
return n.Type()
}
+
+// constExprOp returns an ir.Op that represents the outermost
+// operation of the given constant expression. It's intended for use
+// with ir.RawOrigExpr.
+func constExprOp(expr syntax.Expr) ir.Op {
+ switch expr := expr.(type) {
+ default:
+ panic(fmt.Sprintf("%s: unexpected expression: %T", expr.Pos(), expr))
+
+ case *syntax.BasicLit:
+ return ir.OLITERAL
+ case *syntax.Name, *syntax.SelectorExpr:
+ return ir.ONAME
+ case *syntax.CallExpr:
+ return ir.OCALL
+ case *syntax.Operation:
+ if expr.Y == nil {
+ return unOps[expr.Op]
+ }
+ return binOps[expr.Op]
+ }
+}
+
+func unparen(expr syntax.Expr) syntax.Expr {
+ for {
+ paren, ok := expr.(*syntax.ParenExpr)
+ if !ok {
+ return expr
+ }
+ expr = paren.X
+ }
+}
diff --git a/src/cmd/compile/internal/noder/frames_go1.go b/src/cmd/compile/internal/noder/frames_go1.go
new file mode 100644
index 0000000000..d00e0f51f9
--- /dev/null
+++ b/src/cmd/compile/internal/noder/frames_go1.go
@@ -0,0 +1,21 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.7
+// +build !go1.7
+
+// TODO(mdempsky): Remove after #44505 is resolved
+
+package noder
+
+import "runtime"
+
+func walkFrames(pcs []uintptr, visit frameVisitor) {
+ for _, pc := range pcs {
+ fn := runtime.FuncForPC(pc)
+ file, line := fn.FileLine(pc)
+
+ visit(file, line, fn.Name(), pc-fn.Entry())
+ }
+}
diff --git a/src/cmd/compile/internal/noder/frames_go17.go b/src/cmd/compile/internal/noder/frames_go17.go
new file mode 100644
index 0000000000..48d77625b4
--- /dev/null
+++ b/src/cmd/compile/internal/noder/frames_go17.go
@@ -0,0 +1,25 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.7
+// +build go1.7
+
+package noder
+
+import "runtime"
+
+func walkFrames(pcs []uintptr, visit frameVisitor) {
+ if len(pcs) == 0 {
+ return
+ }
+
+ frames := runtime.CallersFrames(pcs)
+ for {
+ frame, more := frames.Next()
+ visit(frame.File, frame.Line, frame.Function, frame.PC-frame.Entry)
+ if !more {
+ return
+ }
+ }
+}
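walkFrames here is the Go 1.7+ runtime.CallersFrames pattern. The frameVisitor signature below is inferred from the call sites in this diff, and the output format is only illustrative (fmtFrames itself is not part of this hunk):

package main

import (
	"fmt"
	"runtime"
)

type frameVisitor func(file string, line int, name string, offset uintptr)

// walk follows the same CallersFrames loop as walkFrames above.
func walk(pcs []uintptr, visit frameVisitor) {
	if len(pcs) == 0 {
		return
	}
	frames := runtime.CallersFrames(pcs)
	for {
		frame, more := frames.Next()
		visit(frame.File, frame.Line, frame.Function, frame.PC-frame.Entry)
		if !more {
			return
		}
	}
}

func main() {
	var pcs [8]uintptr
	n := runtime.Callers(1, pcs[:])
	walk(pcs[:n], func(file string, line int, name string, offset uintptr) {
		fmt.Printf("%s:%d: %s +0x%x\n", file, line, name, offset)
	})
}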
diff --git a/src/cmd/compile/internal/noder/helpers.go b/src/cmd/compile/internal/noder/helpers.go
index 9da0e49300..9487e76336 100644
--- a/src/cmd/compile/internal/noder/helpers.go
+++ b/src/cmd/compile/internal/noder/helpers.go
@@ -43,6 +43,32 @@ func Const(pos src.XPos, typ *types.Type, val constant.Value) ir.Node {
return typed(typ, ir.NewBasicLit(pos, val))
}
+func OrigConst(pos src.XPos, typ *types.Type, val constant.Value, op ir.Op, raw string) ir.Node {
+ orig := ir.NewRawOrigExpr(pos, op, raw)
+ return ir.NewConstExpr(val, typed(typ, orig))
+}
+
+// FixValue returns val after converting and truncating it as
+// appropriate for typ.
+func FixValue(typ *types.Type, val constant.Value) constant.Value {
+ assert(typ.Kind() != types.TFORW)
+ switch {
+ case typ.IsInteger():
+ val = constant.ToInt(val)
+ case typ.IsFloat():
+ val = constant.ToFloat(val)
+ case typ.IsComplex():
+ val = constant.ToComplex(val)
+ }
+ if !typ.IsUntyped() {
+ val = typecheck.DefaultLit(ir.NewBasicLit(src.NoXPos, val), typ).Val()
+ }
+ if !typ.IsTypeParam() {
+ ir.AssertValidTypeForConst(typ, val)
+ }
+ return val
+}
+
func Nil(pos src.XPos, typ *types.Type) ir.Node {
return typed(typ, ir.NewNilExpr(pos))
}
@@ -87,15 +113,15 @@ func Binary(pos src.XPos, op ir.Op, typ *types.Type, x, y ir.Node) ir.Node {
func Call(pos src.XPos, typ *types.Type, fun ir.Node, args []ir.Node, dots bool) ir.Node {
n := ir.NewCallExpr(pos, ir.OCALL, fun, args)
n.IsDDD = dots
- // n.Use will be changed to ir.CallUseStmt in g.stmt() if this call is
- // just a statement (any return values are ignored).
- n.Use = ir.CallUseExpr
if fun.Op() == ir.OTYPE {
// Actually a type conversion, not a function call.
- if fun.Type().HasTParam() || args[0].Type().HasTParam() {
- // For type params, don't typecheck until we actually know
- // the type.
+ if !fun.Type().IsInterface() &&
+ (fun.Type().HasTParam() || args[0].Type().HasTParam()) {
+ // For type params, we can transform if fun.Type() is known
+ // to be an interface (in which case a CONVIFACE node will be
+ // inserted). Otherwise, don't typecheck until we actually
+ // know the type.
return typed(typ, n)
}
typed(typ, n)
@@ -103,24 +129,27 @@ func Call(pos src.XPos, typ *types.Type, fun ir.Node, args []ir.Node, dots bool)
}
if fun, ok := fun.(*ir.Name); ok && fun.BuiltinOp != 0 {
- // For Builtin ops, we currently stay with using the old
- // typechecker to transform the call to a more specific expression
- // and possibly use more specific ops. However, for a bunch of the
- // ops, we delay doing the old typechecker if any of the args have
- // type params, for a variety of reasons:
- //
- // OMAKE: hard to choose specific ops OMAKESLICE, etc. until arg type is known
- // OREAL/OIMAG: can't determine type float32/float64 until arg type know
- // OLEN/OCAP: old typechecker will complain if arg is not obviously a slice/array.
- // OAPPEND: old typechecker will complain if arg is not obviously slice, etc.
+ // For most Builtin ops, we delay doing transformBuiltin if any of the
+ // args have type params, for a variety of reasons:
//
- // We will eventually break out the transforming functionality
- // needed for builtin's, and call it here or during stenciling, as
- // appropriate.
+ // OMAKE: transformMake can't choose specific ops OMAKESLICE, etc.
+ // until arg type is known
+ // OREAL/OIMAG: transformRealImag can't determine type float32/float64
+ // until arg type known
+ // OAPPEND: transformAppend requires that the arg is a slice
+ // ODELETE: transformDelete requires that the arg is a map
+ // OALIGNOF, OSIZEOF: can't be eval'ed to a constant until arg types are known.
switch fun.BuiltinOp {
- case ir.OMAKE, ir.OREAL, ir.OIMAG, ir.OLEN, ir.OCAP, ir.OAPPEND:
+ case ir.OMAKE, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.ODELETE, ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
hasTParam := false
for _, arg := range args {
+ if fun.BuiltinOp == ir.OOFFSETOF {
+ // It's the type of the left operand of the
+ // selection that matters, not the type of
+ // the field itself (which is irrelevant for
+ // offsetof).
+ arg = arg.(*ir.SelectorExpr).X
+ }
if arg.Type().HasTParam() {
hasTParam = true
break
@@ -137,10 +166,8 @@ func Call(pos src.XPos, typ *types.Type, fun ir.Node, args []ir.Node, dots bool)
// Add information, now that we know that fun is actually being called.
switch fun := fun.(type) {
- case *ir.ClosureExpr:
- fun.Func.SetClosureCalled(true)
case *ir.SelectorExpr:
- if fun.Op() == ir.OCALLPART {
+ if fun.Op() == ir.OMETHVALUE {
op := ir.ODOTMETH
if fun.X.Type().IsInterface() {
op = ir.ODOTINTER
@@ -152,46 +179,52 @@ func Call(pos src.XPos, typ *types.Type, fun ir.Node, args []ir.Node, dots bool)
}
}
- if fun.Type().HasTParam() {
- // If the fun arg is or has a type param, don't do any extra
+ if fun.Type().HasTParam() || fun.Op() == ir.OXDOT || fun.Op() == ir.OFUNCINST {
+ // If the fun arg is or has a type param, we can't do all the
// transformations, since we may not have needed properties yet
- // (e.g. number of return values, etc). The type param is probably
- // described by a structural constraint that requires it to be a
- // certain function type, etc., but we don't want to analyze that.
- return typed(typ, n)
- }
-
- if fun.Op() == ir.OXDOT {
- if !fun.(*ir.SelectorExpr).X.Type().HasTParam() {
- base.FatalfAt(pos, "Expecting type param receiver in %v", fun)
+ // (e.g. number of return values, etc). The same applies if a fun
+ // which is an XDOT could not be transformed yet because of a generic
+ // type in the X of the selector expression.
+ //
+ // A function instantiation (even if fully concrete) shouldn't be
+ // transformed yet, because we need to add the dictionary during the
+ // transformation.
+ //
+ // However, if we have a function type (even though it is
+ // parameterized), then we can add in any needed CONVIFACE nodes via
+ // typecheckaste(). We need to call transformArgs() to deal first
+ // with the f(g()) case where g returns multiple return values. We
+ // can't do anything if fun is a type param (which is probably
+ // described by a structural constraint).
+ if fun.Type().Kind() == types.TFUNC {
+ transformArgs(n)
+ typecheckaste(ir.OCALL, fun, n.IsDDD, fun.Type().Params(), n.Args, true)
}
- // For methods called in a generic function, don't do any extra
- // transformations. We will do those later when we create the
- // instantiated function and have the correct receiver type.
- typed(typ, n)
- return n
- }
- if fun.Op() != ir.OFUNCINST {
- // If no type params, do the normal call transformations. This
- // will convert OCALL to OCALLFUNC.
- typed(typ, n)
- transformCall(n)
- return n
+ return typed(typ, n)
}
- // Leave the op as OCALL, which indicates the call still needs typechecking.
+ // If no type params, do the normal call transformations. This
+ // will convert OCALL to OCALLFUNC.
typed(typ, n)
+ transformCall(n)
return n
}
func Compare(pos src.XPos, typ *types.Type, op ir.Op, x, y ir.Node) ir.Node {
n := ir.NewBinaryExpr(pos, op, x, y)
if x.Type().HasTParam() || y.Type().HasTParam() {
- // Delay transformCompare() if either arg has a type param, since
- // it needs to know the exact types to decide on any needed conversions.
- n.SetType(typ)
- n.SetTypecheck(3)
- return n
+ xIsInt := x.Type().IsInterface()
+ yIsInt := y.Type().IsInterface()
+ if !(xIsInt && !yIsInt || !xIsInt && yIsInt) {
+ // If either arg is a type param, then we can still do the
+ // transformCompare() if we know that one arg is an interface
+ // and the other is not. Otherwise, we delay
+ // transformCompare(), since it needs to know the exact types
+ // to decide on any needed conversions.
+ n.SetType(typ)
+ n.SetTypecheck(3)
+ return n
+ }
}
typed(typ, n)
transformCompare(n)
@@ -225,7 +258,7 @@ func DotMethod(pos src.XPos, x ir.Node, index int) *ir.SelectorExpr {
// Method value.
typ := typecheck.NewMethodType(method.Type, nil)
- return dot(pos, typ, ir.OCALLPART, x, method)
+ return dot(pos, typ, ir.OMETHVALUE, x, method)
}
// MethodExpr returns a OMETHEXPR node with the indicated index into the methods
@@ -321,5 +354,15 @@ var one = constant.MakeInt64(1)
func IncDec(pos src.XPos, op ir.Op, x ir.Node) *ir.AssignOpStmt {
assert(x.Type() != nil)
- return ir.NewAssignOpStmt(pos, op, x, typecheck.DefaultLit(ir.NewBasicLit(pos, one), x.Type()))
+ bl := ir.NewBasicLit(pos, one)
+ if x.Type().HasTParam() {
+ // If the operand is generic, then types2 will have proved it must be
+ // a type that fits with increment/decrement, so just set the type of
+ // "one" to n.Type(). This works even for types that are eventually
+ // float or complex.
+ typed(x.Type(), bl)
+ } else {
+ bl = typecheck.DefaultLit(bl, x.Type())
+ }
+ return ir.NewAssignOpStmt(pos, op, x, bl)
}
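FixValue leans on the go/constant conversion helpers before defaulting the literal. A tiny standalone sketch of that first step, using only the go/constant and go/token packages (nothing compiler-specific):

package main

import (
	"fmt"
	"go/constant"
	"go/token"
)

func main() {
	// A float constant headed for an integer type goes through constant.ToInt
	// (ToFloat and ToComplex cover the other kinds).
	exact := constant.MakeFromLiteral("3.0", token.FLOAT, 0)
	fmt.Println(constant.ToInt(exact)) // 3

	// A fractional value is not representable as an Int, so ToInt yields Unknown.
	inexact := constant.MakeFromLiteral("1.5", token.FLOAT, 0)
	fmt.Println(constant.ToInt(inexact).Kind() == constant.Unknown) // true
}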
diff --git a/src/cmd/compile/internal/noder/import.go b/src/cmd/compile/internal/noder/import.go
index 701e9001c8..48f0e48028 100644
--- a/src/cmd/compile/internal/noder/import.go
+++ b/src/cmd/compile/internal/noder/import.go
@@ -8,7 +8,6 @@ import (
"errors"
"fmt"
"internal/buildcfg"
- "io"
"os"
pathpkg "path"
"runtime"
@@ -32,8 +31,24 @@ import (
"cmd/internal/src"
)
-// Temporary import helper to get type2-based type-checking going.
+// haveLegacyImports records whether we've imported any packages
+// without a new export data section. This is useful for experimenting
+// with new export data format designs, when you need to support
+// existing tests that manually compile files with inconsistent
+// compiler flags.
+var haveLegacyImports = false
+
+// newReadImportFunc is an extension hook for experimenting with new
+// export data formats. If a new export data payload was written out
+// for an imported package by overloading writeNewExportFunc, then
+// that payload will be mapped into memory and passed to
+// newReadImportFunc.
+var newReadImportFunc = func(data string, pkg1 *types.Pkg, check *types2.Checker, packages map[string]*types2.Package) (pkg2 *types2.Package, err error) {
+ panic("unexpected new export data payload")
+}
+
type gcimports struct {
+ check *types2.Checker
packages map[string]*types2.Package
}
@@ -46,13 +61,8 @@ func (m *gcimports) ImportFrom(path, srcDir string, mode types2.ImportMode) (*ty
panic("mode must be 0")
}
- path, err := resolveImportPath(path)
- if err != nil {
- return nil, err
- }
-
- lookup := func(path string) (io.ReadCloser, error) { return openPackage(path) }
- return importer.Import(m.packages, path, srcDir, lookup)
+ _, pkg, err := readImportFile(path, typecheck.Target, m.check, m.packages)
+ return pkg, err
}
func isDriveLetter(b byte) bool {
@@ -175,160 +185,242 @@ func resolveImportPath(path string) (string, error) {
return path, nil
}
-// TODO(mdempsky): Return an error instead.
func importfile(decl *syntax.ImportDecl) *types.Pkg {
- if decl.Path.Kind != syntax.StringLit {
- base.Errorf("import path must be a string")
+ path, err := parseImportPath(decl.Path)
+ if err != nil {
+ base.Errorf("%s", err)
return nil
}
- path, err := strconv.Unquote(decl.Path.Value)
+ pkg, _, err := readImportFile(path, typecheck.Target, nil, nil)
if err != nil {
- base.Errorf("import path must be a string")
+ base.Errorf("%s", err)
return nil
}
+ if pkg != ir.Pkgs.Unsafe && pkg.Height >= myheight {
+ myheight = pkg.Height + 1
+ }
+ return pkg
+}
+
+func parseImportPath(pathLit *syntax.BasicLit) (string, error) {
+ if pathLit.Kind != syntax.StringLit {
+ return "", errors.New("import path must be a string")
+ }
+
+ path, err := strconv.Unquote(pathLit.Value)
+ if err != nil {
+ return "", errors.New("import path must be a string")
+ }
+
if err := checkImportPath(path, false); err != nil {
- base.Errorf("%s", err.Error())
- return nil
+ return "", err
}
+ return path, err
+}
+
+// readImportFile reads the import file for the given package path and
+// returns its types.Pkg representation. If packages is non-nil, the
+// types2.Package representation is also returned.
+func readImportFile(path string, target *ir.Package, check *types2.Checker, packages map[string]*types2.Package) (pkg1 *types.Pkg, pkg2 *types2.Package, err error) {
path, err = resolveImportPath(path)
if err != nil {
- base.Errorf("%s", err)
- return nil
+ return
+ }
+
+ if path == "unsafe" {
+ pkg1, pkg2 = ir.Pkgs.Unsafe, types2.Unsafe
+
+ // TODO(mdempsky): Investigate if this actually matters. Why would
+ // the linker or runtime care whether a package imported unsafe?
+ if !pkg1.Direct {
+ pkg1.Direct = true
+ target.Imports = append(target.Imports, pkg1)
+ }
+
+ return
}
- importpkg := types.NewPkg(path, "")
- if importpkg.Direct {
- return importpkg // already fully loaded
+ pkg1 = types.NewPkg(path, "")
+ if packages != nil {
+ pkg2 = packages[path]
+ assert(pkg1.Direct == (pkg2 != nil && pkg2.Complete()))
}
- importpkg.Direct = true
- typecheck.Target.Imports = append(typecheck.Target.Imports, importpkg)
- if path == "unsafe" {
- return importpkg // initialized with universe
+ if pkg1.Direct {
+ return
}
+ pkg1.Direct = true
+ target.Imports = append(target.Imports, pkg1)
f, err := openPackage(path)
if err != nil {
- base.Errorf("could not import %q: %v", path, err)
- base.ErrorExit()
+ return
}
- imp := bio.NewReader(f)
- defer imp.Close()
- file := f.Name()
+ defer f.Close()
- // check object header
- p, err := imp.ReadString('\n')
+ r, end, newsize, err := findExportData(f)
if err != nil {
- base.Errorf("import %s: reading input: %v", file, err)
- base.ErrorExit()
+ return
}
- if p == "!<arch>\n" { // package archive
- // package export block should be first
- sz := archive.ReadHeader(imp.Reader, "__.PKGDEF")
- if sz <= 0 {
- base.Errorf("import %s: not a package file", file)
- base.ErrorExit()
- }
- p, err = imp.ReadString('\n')
+ if base.Debug.Export != 0 {
+ fmt.Printf("importing %s (%s)\n", path, f.Name())
+ }
+
+ if newsize != 0 {
+ // We have unified IR data. Map it, and feed to the importers.
+ end -= newsize
+ var data string
+ data, err = base.MapFile(r.File(), end, newsize)
if err != nil {
- base.Errorf("import %s: reading input: %v", file, err)
- base.ErrorExit()
+ return
}
- }
- if !strings.HasPrefix(p, "go object ") {
- base.Errorf("import %s: not a go object file: %s", file, p)
- base.ErrorExit()
- }
- q := objabi.HeaderString()
- if p != q {
- base.Errorf("import %s: object is [%s] expected [%s]", file, p, q)
- base.ErrorExit()
- }
+ pkg2, err = newReadImportFunc(data, pkg1, check, packages)
+ } else {
+ // We only have old data. Oh well, fall back to the legacy importers.
+ haveLegacyImports = true
- // process header lines
- for {
- p, err = imp.ReadString('\n')
+ var c byte
+ switch c, err = r.ReadByte(); {
+ case err != nil:
+ return
+
+ case c != 'i':
+ // Indexed format is distinguished by an 'i' byte,
+ // whereas previous export formats started with 'c', 'd', or 'v'.
+ err = fmt.Errorf("unexpected package format byte: %v", c)
+ return
+ }
+
+ pos := r.Offset()
+
+ // Map string (and data) section into memory as a single large
+ // string. This reduces heap fragmentation and allows
+ // returning individual substrings very efficiently.
+ var data string
+ data, err = base.MapFile(r.File(), pos, end-pos)
if err != nil {
- base.Errorf("import %s: reading input: %v", file, err)
- base.ErrorExit()
+ return
}
- if p == "\n" {
- break // header ends with blank line
+
+ typecheck.ReadImports(pkg1, data)
+
+ if packages != nil {
+ pkg2, err = importer.ImportData(packages, data, path)
+ if err != nil {
+ return
+ }
}
}
- // Expect $$B\n to signal binary import format.
+ err = addFingerprint(path, f, end)
+ return
+}
+
+// findExportData returns a *bio.Reader positioned at the start of the
+// binary export data section, and a file offset for where to stop
+// reading.
+func findExportData(f *os.File) (r *bio.Reader, end, newsize int64, err error) {
+ r = bio.NewReader(f)
+
+ // check object header
+ line, err := r.ReadString('\n')
+ if err != nil {
+ return
+ }
- // look for $$
- var c byte
- for {
- c, err = imp.ReadByte()
+ if line == "!<arch>\n" { // package archive
+ // package export block should be first
+ sz := int64(archive.ReadHeader(r.Reader, "__.PKGDEF"))
+ if sz <= 0 {
+ err = errors.New("not a package file")
+ return
+ }
+ end = r.Offset() + sz
+ line, err = r.ReadString('\n')
if err != nil {
- break
+ return
}
- if c == '$' {
- c, err = imp.ReadByte()
- if c == '$' || err != nil {
- break
- }
+ } else {
+ // Not an archive; provide end of file instead.
+ // TODO(mdempsky): I don't think this happens anymore.
+ var fi os.FileInfo
+ fi, err = f.Stat()
+ if err != nil {
+ return
}
+ end = fi.Size()
}
- // get character after $$
- if err == nil {
- c, _ = imp.ReadByte()
+ if !strings.HasPrefix(line, "go object ") {
+ err = fmt.Errorf("not a go object file: %s", line)
+ return
+ }
+ if expect := objabi.HeaderString(); line != expect {
+ err = fmt.Errorf("object is [%s] expected [%s]", line, expect)
+ return
}
- var fingerprint goobj.FingerprintType
- switch c {
- case '\n':
- base.Errorf("cannot import %s: old export format no longer supported (recompile library)", path)
- return nil
-
- case 'B':
- if base.Debug.Export != 0 {
- fmt.Printf("importing %s (%s)\n", path, file)
+ // process header lines
+ for !strings.HasPrefix(line, "$$") {
+ if strings.HasPrefix(line, "newexportsize ") {
+ fields := strings.Fields(line)
+ newsize, err = strconv.ParseInt(fields[1], 10, 64)
+ if err != nil {
+ return
+ }
}
- imp.ReadByte() // skip \n after $$B
- c, err = imp.ReadByte()
+ line, err = r.ReadString('\n')
if err != nil {
- base.Errorf("import %s: reading input: %v", file, err)
- base.ErrorExit()
+ return
}
+ }
- // Indexed format is distinguished by an 'i' byte,
- // whereas previous export formats started with 'c', 'd', or 'v'.
- if c != 'i' {
- base.Errorf("import %s: unexpected package format byte: %v", file, c)
- base.ErrorExit()
- }
- fingerprint = typecheck.ReadImports(importpkg, imp)
+ // Expect $$B\n to signal binary import format.
+ if line != "$$B\n" {
+ err = errors.New("old export format no longer supported (recompile library)")
+ return
+ }
+
+ return
+}
+
+// addFingerprint reads the linker fingerprint included at the end of
+// the export data.
+func addFingerprint(path string, f *os.File, end int64) error {
+ const eom = "\n$$\n"
+ var fingerprint goobj.FingerprintType
+
+ var buf [len(fingerprint) + len(eom)]byte
+ if _, err := f.ReadAt(buf[:], end-int64(len(buf))); err != nil {
+ return err
+ }
- default:
- base.Errorf("no import in %q", path)
- base.ErrorExit()
+ // Caller should have given us the end position of the export data,
+ // which should end with the "\n$$\n" marker. As a consistency check
+ // to make sure we're reading at the right offset, make sure we
+ // found the marker.
+ if s := string(buf[len(fingerprint):]); s != eom {
+ return fmt.Errorf("expected $$ marker, but found %q", s)
}
+ copy(fingerprint[:], buf[:])
+
// assume files move (get installed) so don't record the full path
if base.Flag.Cfg.PackageFile != nil {
// If using a packageFile map, assume path_ can be recorded directly.
base.Ctxt.AddImport(path, fingerprint)
} else {
// For file "/Users/foo/go/pkg/darwin_amd64/math.a" record "math.a".
+ file := f.Name()
base.Ctxt.AddImport(file[len(file)-len(path)-len(".a"):], fingerprint)
}
-
- if importpkg.Height >= myheight {
- myheight = importpkg.Height + 1
- }
-
- return importpkg
+ return nil
}
// The linker uses the magic symbol prefixes "go." and "type."
@@ -431,7 +523,7 @@ func clearImports() {
s.Def = nil
continue
}
- if types.IsDotAlias(s) {
+ if s.Def != nil && s.Def.Sym() != s {
// throw away top-level name left over
// from previous import . "x"
// We'll report errors after type checking in CheckDotImports.
diff --git a/src/cmd/compile/internal/noder/irgen.go b/src/cmd/compile/internal/noder/irgen.go
index 3e0d3285ab..29882eb773 100644
--- a/src/cmd/compile/internal/noder/irgen.go
+++ b/src/cmd/compile/internal/noder/irgen.go
@@ -18,9 +18,9 @@ import (
"cmd/internal/src"
)
-// check2 type checks a Go package using types2, and then generates IR
-// using the results.
-func check2(noders []*noder) {
+// checkFiles configures and runs the types2 checker on the given
+// parsed source files and then returns the result.
+func checkFiles(noders []*noder) (posMap, *types2.Package, *types2.Info) {
if base.SyntaxErrors() != 0 {
base.ErrorExit()
}
@@ -34,20 +34,22 @@ func check2(noders []*noder) {
}
// typechecking
+ importer := gcimports{
+ packages: make(map[string]*types2.Package),
+ }
conf := types2.Config{
GoVersion: base.Flag.Lang,
IgnoreLabels: true, // parser already checked via syntax.CheckBranches mode
CompilerErrorMessages: true, // use error strings matching existing compiler errors
+ AllowTypeLists: true, // remove this line once all tests use type set syntax
Error: func(err error) {
terr := err.(types2.Error)
base.ErrorfAt(m.makeXPos(terr.Pos), "%s", terr.Msg)
},
- Importer: &gcimports{
- packages: make(map[string]*types2.Package),
- },
- Sizes: &gcSizes{},
+ Importer: &importer,
+ Sizes: &gcSizes{},
}
- info := types2.Info{
+ info := &types2.Info{
Types: make(map[syntax.Expr]types2.TypeAndValue),
Defs: make(map[*syntax.Name]types2.Object),
Uses: make(map[*syntax.Name]types2.Object),
@@ -57,12 +59,24 @@ func check2(noders []*noder) {
Inferred: make(map[syntax.Expr]types2.Inferred),
// expand as needed
}
- pkg, err := conf.Check(base.Ctxt.Pkgpath, files, &info)
- files = nil
+
+ pkg := types2.NewPackage(base.Ctxt.Pkgpath, "")
+ importer.check = types2.NewChecker(&conf, pkg, info)
+ err := importer.check.Files(files)
+
base.ExitIfErrors()
if err != nil {
base.FatalfAt(src.NoXPos, "conf.Check error: %v", err)
}
+
+ return m, pkg, info
+}
+
+// check2 type checks a Go package using types2, and then generates IR
+// using the results.
+func check2(noders []*noder) {
+ m, pkg, info := checkFiles(noders)
+
if base.Flag.G < 2 {
os.Exit(0)
}
@@ -70,7 +84,7 @@ func check2(noders []*noder) {
g := irgen{
target: typecheck.Target,
self: pkg,
- info: &info,
+ info: info,
posMap: m,
objs: make(map[types2.Object]*ir.Name),
typs: make(map[types2.Type]*types.Type),
@@ -82,6 +96,41 @@ func check2(noders []*noder) {
}
}
+// gfInfo is information gathered on a generic function.
+type gfInfo struct {
+ tparams []*types.Type
+ derivedTypes []*types.Type
+ // Nodes in the generic function that require a subdictionary. Includes
+ // method and function calls (OCALL), function values (OFUNCINST), method
+ // values/expressions (OXDOT).
+ subDictCalls []ir.Node
+ // Nodes in generic functions that are conversions from a typeparam/derived
+ // type to a specific interface.
+ itabConvs []ir.Node
+ // For type switches on nonempty interfaces, a map from OTYPE entries of
+ // HasTParam type, to the interface type we're switching from.
+ // TODO: what if the type we're switching from is a shape type?
+ type2switchType map[ir.Node]*types.Type
+}
+
+// instInfo is information gathered on a gcshape (or fully concrete)
+// instantiation of a function.
+type instInfo struct {
+ fun *ir.Func // The instantiated function (with body)
+ dictParam *ir.Name // The node inside fun that refers to the dictionary param
+
+ gf *ir.Name // The associated generic function
+ gfInfo *gfInfo
+
+ startSubDict int // Start of dict entries for subdictionaries
+ startItabConv int // Start of dict entries for itab conversions
+ dictLen int // Total number of entries in dictionary
+
+ // Map from nodes in instantiated fun (OCALL, OCALLMETHOD, OFUNCINST, and
+ // OMETHEXPR) to the associated dictionary entry for a sub-dictionary
+ dictEntryMap map[ir.Node]int
+}
+
type irgen struct {
target *ir.Package
self *types2.Package
@@ -92,12 +141,52 @@ type irgen struct {
typs map[types2.Type]*types.Type
marker dwarfgen.ScopeMarker
+ // laterFuncs records tasks that need to run after all declarations
+ // are processed.
+ laterFuncs []func()
+
+ // exprStmtOK indicates whether it's safe to generate expressions or
+ // statements yet.
+ exprStmtOK bool
+
// Fully-instantiated generic types whose methods should be instantiated
instTypeList []*types.Type
+
+ dnum int // for generating unique dictionary variables
+
+ // Map from generic function to information about its type params, derived
+ // types, and subdictionaries.
+ gfInfoMap map[*types.Sym]*gfInfo
+
+ // Map from the name of a function that has been instantiated to information about
+ // its instantiated function, associated generic function/method, and the
+ // mapping from IR nodes to dictionary entries.
+ instInfoMap map[*types.Sym]*instInfo
+
+ // dictionary syms that we still need to finish by writing out any itabconv
+ // entries.
+ dictSymsToFinalize []*delayInfo
+
+ // True when we are compiling a top-level generic function or method. Used to
+ // avoid adding closures of generic functions/methods to the target.Decls
+ // list.
+ topFuncIsGeneric bool
+}
+
+func (g *irgen) later(fn func()) {
+ g.laterFuncs = append(g.laterFuncs, fn)
+}
+
+type delayInfo struct {
+ gf *ir.Name
+ targs []*types.Type
+ sym *types.Sym
+ off int
}
func (g *irgen) generate(noders []*noder) {
types.LocalPkg.Name = g.self.Name()
+ types.LocalPkg.Height = g.self.Height()
typecheck.TypecheckAllowed = true
// Prevent size calculations until we set the underlying type
@@ -107,7 +196,7 @@ func (g *irgen) generate(noders []*noder) {
// At this point, types2 has already handled name resolution and
// type checking. We just need to map from its object and type
// representations to those currently used by the rest of the
- // compiler. This happens mostly in 3 passes.
+ // compiler. This happens in a few passes.
// 1. Process all import declarations. We use the compiler's own
// importer for this, rather than types2's gcimporter-derived one,
@@ -132,7 +221,6 @@ Outer:
}
}
}
- types.LocalPkg.Height = myheight
// 2. Process all package-block type declarations. As with imports,
// we need to make sure all types are properly instantiated before
@@ -157,7 +245,16 @@ Outer:
// 3. Process all remaining declarations.
for _, declList := range declLists {
- g.target.Decls = append(g.target.Decls, g.decls(declList)...)
+ g.decls((*ir.Nodes)(&g.target.Decls), declList)
+ }
+ g.exprStmtOK = true
+
+ // 4. Run any "later" tasks. Avoid using 'range' so that tasks can
+ // recursively queue further tasks. (Not currently utilized though.)
+ for len(g.laterFuncs) > 0 {
+ fn := g.laterFuncs[0]
+ g.laterFuncs = g.laterFuncs[1:]
+ fn()
}
if base.Flag.W > 1 {
@@ -167,6 +264,10 @@ Outer:
}
}
+ // Check for unusual case where noder2 encounters a type error that types2
+ // doesn't check for (e.g. notinheap incompatibility).
+ base.ExitIfErrors()
+
typecheck.DeclareUniverse()
for _, p := range noders {
@@ -175,7 +276,7 @@ Outer:
// Double check for any type-checking inconsistencies. This can be
// removed once we're confident in IR generation results.
- syntax.Walk(p.file, func(n syntax.Node) bool {
+ syntax.Crawl(p.file, func(n syntax.Node) bool {
g.validate(n)
return false
})
@@ -184,9 +285,9 @@ Outer:
// Create any needed stencils of generic functions
g.stencil()
- // For now, remove all generic functions from g.target.Decl, since they
- // have been used for stenciling, but don't compile. TODO: We will
- // eventually export any exportable generic functions.
+ // Remove all generic functions from g.target.Decl, since they have been
+ // used for stenciling, but don't compile. Generic functions will already
+ // have been marked for export as appropriate.
j := 0
for i, decl := range g.target.Decls {
if decl.Op() != ir.ODCLFUNC || !decl.Type().HasTParam() {
@@ -195,6 +296,8 @@ Outer:
}
}
g.target.Decls = g.target.Decls[:j]
+
+ base.Assertf(len(g.laterFuncs) == 0, "still have %d later funcs", len(g.laterFuncs))
}
func (g *irgen) unhandled(what string, p poser) {
diff --git a/src/cmd/compile/internal/noder/linker.go b/src/cmd/compile/internal/noder/linker.go
new file mode 100644
index 0000000000..2bc7f7c608
--- /dev/null
+++ b/src/cmd/compile/internal/noder/linker.go
@@ -0,0 +1,296 @@
+// UNREVIEWED
+
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+import (
+ "io"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/reflectdata"
+ "cmd/compile/internal/types"
+ "cmd/internal/goobj"
+ "cmd/internal/obj"
+)
+
+// This file implements the unified IR linker, which combines the
+// local package's stub data with imported package data to produce a
+// complete export data file. It also rewrites the compiler's
+// extension data sections based on the results of compilation (e.g.,
+// the function inlining cost and linker symbol index assignments).
+//
+// TODO(mdempsky): Using the name "linker" here is confusing, because
+// readers are likely to mistake references to it for cmd/link. But
+// there's a shortage of good names for "something that combines
+// multiple parts into a cohesive whole"... e.g., "assembler" and
+// "compiler" are also already taken.
+
+type linker struct {
+ pw pkgEncoder
+
+ pkgs map[string]int
+ decls map[*types.Sym]int
+}
+
+func (l *linker) relocAll(pr *pkgReader, relocs []relocEnt) []relocEnt {
+ res := make([]relocEnt, len(relocs))
+ for i, rent := range relocs {
+ rent.idx = l.relocIdx(pr, rent.kind, rent.idx)
+ res[i] = rent
+ }
+ return res
+}
+
+func (l *linker) relocIdx(pr *pkgReader, k reloc, idx int) int {
+ assert(pr != nil)
+
+ absIdx := pr.absIdx(k, idx)
+
+ if newidx := pr.newindex[absIdx]; newidx != 0 {
+ return ^newidx
+ }
+
+ var newidx int
+ switch k {
+ case relocString:
+ newidx = l.relocString(pr, idx)
+ case relocPkg:
+ newidx = l.relocPkg(pr, idx)
+ case relocObj:
+ newidx = l.relocObj(pr, idx)
+
+ default:
+ // Generic relocations.
+ //
+ // TODO(mdempsky): Deduplicate more sections? In fact, I think
+ // every section could be deduplicated. This would also be easier
+ // if we do external relocations.
+
+ w := l.pw.newEncoderRaw(k)
+ l.relocCommon(pr, &w, k, idx)
+ newidx = w.idx
+ }
+
+ pr.newindex[absIdx] = ^newidx
+
+ return newidx
+}
+
+func (l *linker) relocString(pr *pkgReader, idx int) int {
+ return l.pw.stringIdx(pr.stringIdx(idx))
+}
+
+func (l *linker) relocPkg(pr *pkgReader, idx int) int {
+ path := pr.peekPkgPath(idx)
+
+ if newidx, ok := l.pkgs[path]; ok {
+ return newidx
+ }
+
+ r := pr.newDecoder(relocPkg, idx, syncPkgDef)
+ w := l.pw.newEncoder(relocPkg, syncPkgDef)
+ l.pkgs[path] = w.idx
+
+ // TODO(mdempsky): We end up leaving an empty string reference here
+ // from when the package was originally written as "". Probably not
+ // a big deal, but a little annoying. Maybe relocating
+ // cross-references in place is the way to go after all.
+ w.relocs = l.relocAll(pr, r.relocs)
+
+ _ = r.string() // original path
+ w.string(path)
+
+ io.Copy(&w.data, &r.data)
+
+ return w.flush()
+}
+
+func (l *linker) relocObj(pr *pkgReader, idx int) int {
+ path, name, tag := pr.peekObj(idx)
+ sym := types.NewPkg(path, "").Lookup(name)
+
+ if newidx, ok := l.decls[sym]; ok {
+ return newidx
+ }
+
+ if tag == objStub && path != "builtin" && path != "unsafe" {
+ pri, ok := objReader[sym]
+ if !ok {
+ base.Fatalf("missing reader for %q.%v", path, name)
+ }
+ assert(ok)
+
+ pr = pri.pr
+ idx = pri.idx
+
+ path2, name2, tag2 := pr.peekObj(idx)
+ sym2 := types.NewPkg(path2, "").Lookup(name2)
+ assert(sym == sym2)
+ assert(tag2 != objStub)
+ }
+
+ w := l.pw.newEncoderRaw(relocObj)
+ wext := l.pw.newEncoderRaw(relocObjExt)
+ wname := l.pw.newEncoderRaw(relocName)
+ wdict := l.pw.newEncoderRaw(relocObjDict)
+
+ l.decls[sym] = w.idx
+ assert(wext.idx == w.idx)
+ assert(wname.idx == w.idx)
+ assert(wdict.idx == w.idx)
+
+ l.relocCommon(pr, &w, relocObj, idx)
+ l.relocCommon(pr, &wname, relocName, idx)
+ l.relocCommon(pr, &wdict, relocObjDict, idx)
+
+ var obj *ir.Name
+ if path == "" {
+ var ok bool
+ obj, ok = sym.Def.(*ir.Name)
+
+ // Generic types and functions and declared constraint types won't
+ // have definitions.
+ // For now, just generically copy their extension data.
+ // TODO(mdempsky): Restore assertion.
+ if !ok && false {
+ base.Fatalf("missing definition for %v", sym)
+ }
+ }
+
+ if obj != nil {
+ wext.sync(syncObject1)
+ switch tag {
+ case objFunc:
+ l.relocFuncExt(&wext, obj)
+ case objType:
+ l.relocTypeExt(&wext, obj)
+ case objVar:
+ l.relocVarExt(&wext, obj)
+ }
+ wext.flush()
+ } else {
+ l.relocCommon(pr, &wext, relocObjExt, idx)
+ }
+
+ return w.idx
+}
+
+func (l *linker) relocCommon(pr *pkgReader, w *encoder, k reloc, idx int) {
+ r := pr.newDecoderRaw(k, idx)
+ w.relocs = l.relocAll(pr, r.relocs)
+ io.Copy(&w.data, &r.data)
+ w.flush()
+}
+
+func (l *linker) pragmaFlag(w *encoder, pragma ir.PragmaFlag) {
+ w.sync(syncPragma)
+ w.int(int(pragma))
+}
+
+func (l *linker) relocFuncExt(w *encoder, name *ir.Name) {
+ w.sync(syncFuncExt)
+
+ l.pragmaFlag(w, name.Func.Pragma)
+ l.linkname(w, name)
+
+ // Relocated extension data.
+ w.bool(true)
+
+ // Record definition ABI so cross-ABI calls can be direct.
+ // This is important for the performance of calling some
+ // common functions implemented in assembly (e.g., bytealg).
+ w.uint64(uint64(name.Func.ABI))
+
+ // Escape analysis.
+ for _, fs := range &types.RecvsParams {
+ for _, f := range fs(name.Type()).FieldSlice() {
+ w.string(f.Note)
+ }
+ }
+
+ if inl := name.Func.Inl; w.bool(inl != nil) {
+ w.len(int(inl.Cost))
+ w.bool(inl.CanDelayResults)
+
+ pri, ok := bodyReader[name.Func]
+ assert(ok)
+ w.reloc(relocBody, l.relocIdx(pri.pr, relocBody, pri.idx))
+ }
+
+ w.sync(syncEOF)
+}
+
+func (l *linker) relocTypeExt(w *encoder, name *ir.Name) {
+ w.sync(syncTypeExt)
+
+ typ := name.Type()
+
+ l.pragmaFlag(w, name.Pragma())
+
+ // For type T, export the index of type descriptor symbols of T and *T.
+ l.lsymIdx(w, "", reflectdata.TypeLinksym(typ))
+ l.lsymIdx(w, "", reflectdata.TypeLinksym(typ.PtrTo()))
+
+ if typ.Kind() != types.TINTER {
+ for _, method := range typ.Methods().Slice() {
+ l.relocFuncExt(w, method.Nname.(*ir.Name))
+ }
+ }
+}
+
+func (l *linker) relocVarExt(w *encoder, name *ir.Name) {
+ w.sync(syncVarExt)
+ l.linkname(w, name)
+}
+
+func (l *linker) linkname(w *encoder, name *ir.Name) {
+ w.sync(syncLinkname)
+
+ linkname := name.Sym().Linkname
+ if !l.lsymIdx(w, linkname, name.Linksym()) {
+ w.string(linkname)
+ }
+}
+
+func (l *linker) lsymIdx(w *encoder, linkname string, lsym *obj.LSym) bool {
+ if lsym.PkgIdx > goobj.PkgIdxSelf || (lsym.PkgIdx == goobj.PkgIdxInvalid && !lsym.Indexed()) || linkname != "" {
+ w.int64(-1)
+ return false
+ }
+
+ // For a defined symbol, export its index.
+ // For re-exporting an imported symbol, pass its index through.
+ w.int64(int64(lsym.SymIdx))
+ return true
+}
+
+// @@@ Helpers
+
+// TODO(mdempsky): These should probably be removed. I think they're a
+// smell that the export data format is not yet quite right.
+
+func (pr *pkgDecoder) peekPkgPath(idx int) string {
+ r := pr.newDecoder(relocPkg, idx, syncPkgDef)
+ path := r.string()
+ if path == "" {
+ path = pr.pkgPath
+ }
+ return path
+}
+
+func (pr *pkgDecoder) peekObj(idx int) (string, string, codeObj) {
+ r := pr.newDecoder(relocName, idx, syncObject1)
+ r.sync(syncSym)
+ r.sync(syncPkg)
+ path := pr.peekPkgPath(r.reloc(relocPkg))
+ name := r.string()
+ assert(name != "")
+
+ tag := codeObj(r.code(syncCodeObj))
+
+ return path, name, tag
+}
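The relocation code above records pr.newindex[absIdx] = ^newidx, storing the relocated index bitwise-inverted so that a zero table entry can mean "not relocated yet" even when the new index is 0. A minimal standalone sketch of that idea follows; remapTable, set, and get are hypothetical names used only for illustration, not compiler types.

package main

import "fmt"

// remapTable sketches the ^idx trick: entries are stored bitwise-inverted,
// so the zero value of the slice means "no mapping recorded yet" even when
// the real target index is 0.
type remapTable []int

func (t remapTable) set(old, new int) { t[old] = ^new }

func (t remapTable) get(old int) (int, bool) {
    if v := t[old]; v != 0 {
        return ^v, true // invert back to recover the stored index
    }
    return 0, false
}

func main() {
    t := make(remapTable, 4)
    t.set(2, 0) // a mapping to index 0 is still distinguishable from "missing"

    idx, ok := t.get(2)
    fmt.Println(idx, ok) // 0 true

    _, ok = t.get(1)
    fmt.Println(ok) // false
}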
diff --git a/src/cmd/compile/internal/noder/noder.go b/src/cmd/compile/internal/noder/noder.go
index 5fcad096c2..2b67a91b3f 100644
--- a/src/cmd/compile/internal/noder/noder.go
+++ b/src/cmd/compile/internal/noder/noder.go
@@ -5,9 +5,11 @@
package noder
import (
+ "errors"
"fmt"
"go/constant"
"go/token"
+ "internal/buildcfg"
"os"
"path/filepath"
"runtime"
@@ -29,8 +31,11 @@ import (
func LoadPackage(filenames []string) {
base.Timer.Start("fe", "parse")
+ // -G=3 and unified expect generics syntax, but -G=0 does not.
+ supportsGenerics := base.Flag.G != 0 || buildcfg.Experiment.Unified
+
mode := syntax.CheckBranches
- if base.Flag.G != 0 {
+ if supportsGenerics && types.AllowsGoVersion(types.LocalPkg, 1, 18) {
mode |= syntax.AllowGenerics
}
@@ -75,6 +80,11 @@ func LoadPackage(filenames []string) {
}
base.Timer.AddEvent(int64(lines), "lines")
+ if base.Debug.Unified != 0 {
+ unified(noders)
+ return
+ }
+
if base.Flag.G != 0 {
// Use types2 to type-check and possibly generate IR.
check2(noders)
@@ -109,25 +119,35 @@ func LoadPackage(filenames []string) {
// We also defer type alias declarations until phase 2
// to avoid cycles like #18640.
// TODO(gri) Remove this again once we have a fix for #25838.
-
- // Don't use range--typecheck can add closures to Target.Decls.
- base.Timer.Start("fe", "typecheck", "top1")
- for i := 0; i < len(typecheck.Target.Decls); i++ {
- n := typecheck.Target.Decls[i]
- if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.(*ir.Decl).X.Alias()) {
- typecheck.Target.Decls[i] = typecheck.Stmt(n)
- }
- }
-
+ //
// Phase 2: Variable assignments.
// To check interface assignments, depends on phase 1.
// Don't use range--typecheck can add closures to Target.Decls.
- base.Timer.Start("fe", "typecheck", "top2")
- for i := 0; i < len(typecheck.Target.Decls); i++ {
- n := typecheck.Target.Decls[i]
- if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).X.Alias() {
- typecheck.Target.Decls[i] = typecheck.Stmt(n)
+ for phase, name := range []string{"top1", "top2"} {
+ base.Timer.Start("fe", "typecheck", name)
+ for i := 0; i < len(typecheck.Target.Decls); i++ {
+ n := typecheck.Target.Decls[i]
+ op := n.Op()
+
+ // Closure function declarations are typechecked as part of the
+ // closure expression.
+ if fn, ok := n.(*ir.Func); ok && fn.OClosure != nil {
+ continue
+ }
+
+ // We don't actually add ir.ODCL nodes to Target.Decls. Make sure of that.
+ if op == ir.ODCL {
+ base.FatalfAt(n.Pos(), "unexpected top declaration: %v", op)
+ }
+
+ // Identify declarations that should be deferred to the second
+ // iteration.
+ late := op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.(*ir.Decl).X.Alias()
+
+ if late == (phase == 1) {
+ typecheck.Target.Decls[i] = typecheck.Stmt(n)
+ }
}
}
@@ -136,16 +156,15 @@ func LoadPackage(filenames []string) {
base.Timer.Start("fe", "typecheck", "func")
var fcount int64
for i := 0; i < len(typecheck.Target.Decls); i++ {
- n := typecheck.Target.Decls[i]
- if n.Op() == ir.ODCLFUNC {
+ if fn, ok := typecheck.Target.Decls[i].(*ir.Func); ok {
if base.Flag.W > 1 {
- s := fmt.Sprintf("\nbefore typecheck %v", n)
- ir.Dump(s, n)
+ s := fmt.Sprintf("\nbefore typecheck %v", fn)
+ ir.Dump(s, fn)
}
- typecheck.FuncBody(n.(*ir.Func))
+ typecheck.FuncBody(fn)
if base.Flag.W > 1 {
- s := fmt.Sprintf("\nafter typecheck %v", n)
- ir.Dump(s, n)
+ s := fmt.Sprintf("\nafter typecheck %v", fn)
+ ir.Dump(s, fn)
}
fcount++
}
@@ -172,13 +191,23 @@ func (p *noder) errorAt(pos syntax.Pos, format string, args ...interface{}) {
base.ErrorfAt(p.makeXPos(pos), format, args...)
}
-// TODO(gri) Can we eliminate fileh in favor of absFilename?
-func fileh(name string) string {
- return objabi.AbsFile("", name, base.Flag.TrimPath)
-}
-
-func absFilename(name string) string {
- return objabi.AbsFile(base.Ctxt.Pathname, name, base.Flag.TrimPath)
+// trimFilename returns the "trimmed" filename of b, which is the
+// absolute filename after applying -trimpath processing. This
+// filename form is suitable for use in object files and export data.
+//
+// If b's filename has already been trimmed (i.e., because it was read
+// in from an imported package's export data), then the filename is
+// returned unchanged.
+func trimFilename(b *syntax.PosBase) string {
+ filename := b.Filename()
+ if !b.Trimmed() {
+ dir := ""
+ if b.IsFileBase() {
+ dir = base.Ctxt.Pathname
+ }
+ filename = objabi.AbsFile(dir, filename, base.Flag.TrimPath)
+ }
+ return filename
}
// noder transforms package syntax's AST into a Node tree.
@@ -449,7 +478,7 @@ func (p *noder) varDecl(decl *syntax.VarDecl) []ir.Node {
type constState struct {
group *syntax.Group
typ ir.Ntype
- values []ir.Node
+ values syntax.Expr
iota int64
}
@@ -467,16 +496,15 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []ir.Node {
names := p.declNames(ir.OLITERAL, decl.NameList)
typ := p.typeExprOrNil(decl.Type)
- var values []ir.Node
if decl.Values != nil {
- values = p.exprList(decl.Values)
- cs.typ, cs.values = typ, values
+ cs.typ, cs.values = typ, decl.Values
} else {
if typ != nil {
base.Errorf("const declaration cannot have type without expression")
}
- typ, values = cs.typ, cs.values
+ typ = cs.typ
}
+ values := p.exprList(cs.values)
nn := make([]ir.Node, 0, len(names))
for i, n := range names {
@@ -484,10 +512,16 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []ir.Node {
base.Errorf("missing value in const declaration")
break
}
+
v := values[i]
if decl.Values == nil {
- v = ir.DeepCopy(n.Pos(), v)
+ ir.Visit(v, func(v ir.Node) {
+ if ir.HasUniquePos(v) {
+ v.SetPos(n.Pos())
+ }
+ })
}
+
typecheck.Declare(n, typecheck.DeclContext)
n.Ntype = typ
@@ -625,6 +659,9 @@ func (p *noder) params(params []*syntax.Field, dddOk bool) []*ir.Field {
for i, param := range params {
p.setlineno(param)
nodes = append(nodes, p.param(param, dddOk, i+1 == len(params)))
+ if i > 0 && params[i].Type == params[i-1].Type {
+ nodes[i].Ntype = nodes[i-1].Ntype
+ }
}
return nodes
}
@@ -914,6 +951,9 @@ func (p *noder) structType(expr *syntax.StructType) ir.Node {
} else {
n = ir.NewField(p.pos(field), p.name(field.Name), p.typeExpr(field.Type), nil)
}
+ if i > 0 && expr.FieldList[i].Type == expr.FieldList[i-1].Type {
+ n.Ntype = l[i-1].Ntype
+ }
if i < len(expr.TagList) && expr.TagList[i] != nil {
n.Note = constant.StringVal(p.basicLit(expr.TagList[i]))
}
@@ -977,6 +1017,8 @@ func (p *noder) packname(expr syntax.Expr) *types.Sym {
}
func (p *noder) embedded(typ syntax.Expr) *ir.Field {
+ pos := p.pos(syntax.StartPos(typ))
+
op, isStar := typ.(*syntax.Operation)
if isStar {
if op.Op != syntax.Mul || op.Y != nil {
@@ -986,11 +1028,11 @@ func (p *noder) embedded(typ syntax.Expr) *ir.Field {
}
sym := p.packname(typ)
- n := ir.NewField(p.pos(typ), typecheck.Lookup(sym.Name), importName(sym).(ir.Ntype), nil)
+ n := ir.NewField(pos, typecheck.Lookup(sym.Name), importName(sym).(ir.Ntype), nil)
n.Embedded = true
if isStar {
- n.Ntype = ir.NewStarExpr(p.pos(op), n.Ntype)
+ n.Ntype = ir.NewStarExpr(pos, n.Ntype)
}
return n
}
@@ -1691,7 +1733,7 @@ func (p *noder) pragma(pos syntax.Pos, blankLine bool, text string, old syntax.P
// (primarily misuse of linker flags), other files are not.
// See golang.org/issue/23672.
func isCgoGeneratedFile(pos syntax.Pos) bool {
- return strings.HasPrefix(filepath.Base(filepath.Clean(fileh(pos.Base().Filename()))), "_cgo_")
+ return strings.HasPrefix(filepath.Base(trimFilename(pos.Base())), "_cgo_")
}
// safeArg reports whether arg is a "safe" command-line argument,
@@ -1780,24 +1822,14 @@ func fakeRecv() *ir.Field {
}
func (p *noder) funcLit(expr *syntax.FuncLit) ir.Node {
- xtype := p.typeExpr(expr.Type)
-
- fn := ir.NewFunc(p.pos(expr))
- fn.SetIsHiddenClosure(ir.CurFunc != nil)
-
- fn.Nname = ir.NewNameAt(p.pos(expr), ir.BlankNode.Sym()) // filled in by tcClosure
- fn.Nname.Func = fn
- fn.Nname.Ntype = xtype
- fn.Nname.Defn = fn
-
- clo := ir.NewClosureExpr(p.pos(expr), fn)
- fn.OClosure = clo
+ fn := ir.NewClosureFunc(p.pos(expr), ir.CurFunc != nil)
+ fn.Nname.Ntype = p.typeExpr(expr.Type)
p.funcBody(fn, expr.Body)
ir.FinishCaptureNames(base.Pos, ir.CurFunc, fn)
- return clo
+ return fn.OClosure
}
// A function named init is a special case.
@@ -1841,33 +1873,14 @@ func oldname(s *types.Sym) ir.Node {
}
func varEmbed(makeXPos func(syntax.Pos) src.XPos, name *ir.Name, decl *syntax.VarDecl, pragma *pragmas, haveEmbed bool) {
- if pragma.Embeds == nil {
- return
- }
-
pragmaEmbeds := pragma.Embeds
pragma.Embeds = nil
- pos := makeXPos(pragmaEmbeds[0].Pos)
-
- if !haveEmbed {
- base.ErrorfAt(pos, "go:embed only allowed in Go files that import \"embed\"")
+ if len(pragmaEmbeds) == 0 {
return
}
- if len(decl.NameList) > 1 {
- base.ErrorfAt(pos, "go:embed cannot apply to multiple vars")
- return
- }
- if decl.Values != nil {
- base.ErrorfAt(pos, "go:embed cannot apply to var with initializer")
- return
- }
- if decl.Type == nil {
- // Should not happen, since Values == nil now.
- base.ErrorfAt(pos, "go:embed cannot apply to var without type")
- return
- }
- if typecheck.DeclContext != ir.PEXTERN {
- base.ErrorfAt(pos, "go:embed cannot apply to var inside func")
+
+ if err := checkEmbed(decl, haveEmbed, typecheck.DeclContext != ir.PEXTERN); err != nil {
+ base.ErrorfAt(makeXPos(pragmaEmbeds[0].Pos), "%s", err)
return
}
@@ -1878,3 +1891,24 @@ func varEmbed(makeXPos func(syntax.Pos) src.XPos, name *ir.Name, decl *syntax.Va
typecheck.Target.Embeds = append(typecheck.Target.Embeds, name)
name.Embed = &embeds
}
+
+func checkEmbed(decl *syntax.VarDecl, haveEmbed, withinFunc bool) error {
+ switch {
+ case !haveEmbed:
+ return errors.New("go:embed only allowed in Go files that import \"embed\"")
+ case len(decl.NameList) > 1:
+ return errors.New("go:embed cannot apply to multiple vars")
+ case decl.Values != nil:
+ return errors.New("go:embed cannot apply to var with initializer")
+ case decl.Type == nil:
+ // Should not happen, since Values == nil now.
+ return errors.New("go:embed cannot apply to var without type")
+ case withinFunc:
+ return errors.New("go:embed cannot apply to var inside func")
+ case !types.AllowsGoVersion(types.LocalPkg, 1, 16):
+ return fmt.Errorf("go:embed requires go1.16 or later (-lang was set to %s; check go.mod)", base.Flag.Lang)
+
+ default:
+ return nil
+ }
+}
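The rewritten typecheck loop in the noder.go hunks above folds the old "top1" and "top2" passes into a single loop body, selecting declarations with the predicate late == (phase == 1): phase 0 handles the early declarations, phase 1 the deferred assignments and type aliases. A small standalone sketch of that selection logic, using a hypothetical decl struct rather than the compiler's IR:

package main

import "fmt"

// decl is a hypothetical stand-in for a top-level declaration; late marks
// assignments and type aliases, which are deferred to the second pass.
type decl struct {
    name string
    late bool
}

func main() {
    decls := []decl{
        {"func f", false},
        {"var x = 1", true},
        {"type T int", false},
        {"type A = T", true},
    }

    // Mirrors the two-iteration loop above: phase 0 ("top1") handles the
    // early declarations, phase 1 ("top2") handles the deferred ones.
    for phase, pass := range []string{"top1", "top2"} {
        for _, d := range decls {
            if d.late == (phase == 1) {
                fmt.Println(pass+":", d.name)
            }
        }
    }
}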
diff --git a/src/cmd/compile/internal/noder/object.go b/src/cmd/compile/internal/noder/object.go
index 82cce1ace0..40c0b9cf42 100644
--- a/src/cmd/compile/internal/noder/object.go
+++ b/src/cmd/compile/internal/noder/object.go
@@ -29,7 +29,7 @@ func (g *irgen) use(name *syntax.Name) *ir.Name {
if !ok {
base.FatalfAt(g.pos(name), "unknown name %v", name)
}
- obj := ir.CaptureName(g.pos(obj2), ir.CurFunc, g.obj(obj2))
+ obj := ir.CaptureName(g.pos(name), ir.CurFunc, g.obj(obj2))
if obj.Defn != nil && obj.Defn.Op() == ir.ONAME {
// If CaptureName created a closure variable, then transfer the
// type of the captured name to the new closure variable.
@@ -49,6 +49,11 @@ func (g *irgen) obj(obj types2.Object) *ir.Name {
// For imported objects, we use iimport directly instead of mapping
// the types2 representation.
if obj.Pkg() != g.self {
+ if sig, ok := obj.Type().(*types2.Signature); ok && sig.Recv() != nil {
+ // We can't import a method by name - must import the type
+ // and access the method from it.
+ base.FatalfAt(g.pos(obj), "tried to import a method directly")
+ }
sym := g.sym(obj)
if sym.Def != nil {
return sym.Def.(*ir.Name)
@@ -101,25 +106,28 @@ func (g *irgen) obj(obj types2.Object) *ir.Name {
case *types2.TypeName:
if obj.IsAlias() {
name = g.objCommon(pos, ir.OTYPE, g.sym(obj), class, g.typ(obj.Type()))
+ name.SetAlias(true)
} else {
name = ir.NewDeclNameAt(pos, ir.OTYPE, g.sym(obj))
g.objFinish(name, class, types.NewNamed(name))
}
case *types2.Var:
- var sym *types.Sym
- if class == ir.PPARAMOUT {
+ sym := g.sym(obj)
+ if class == ir.PPARAMOUT && (sym == nil || sym.IsBlank()) {
// Backend needs names for result parameters,
// even if they're anonymous or blank.
- switch obj.Name() {
- case "":
- sym = typecheck.LookupNum("~r", len(ir.CurFunc.Dcl)) // 'r' for "result"
- case "_":
- sym = typecheck.LookupNum("~b", len(ir.CurFunc.Dcl)) // 'b' for "blank"
+ nresults := 0
+ for _, n := range ir.CurFunc.Dcl {
+ if n.Class == ir.PPARAMOUT {
+ nresults++
+ }
+ }
+ if sym == nil {
+ sym = typecheck.LookupNum("~r", nresults) // 'r' for "result"
+ } else {
+ sym = typecheck.LookupNum("~b", nresults) // 'b' for "blank"
}
- }
- if sym == nil {
- sym = g.sym(obj)
}
name = g.objCommon(pos, ir.ONAME, sym, class, g.typ(obj.Type()))
@@ -164,9 +172,8 @@ func (g *irgen) objFinish(name *ir.Name, class ir.Class, typ *types.Type) {
break // methods are exported with their receiver type
}
if types.IsExported(sym.Name) {
- if name.Class == ir.PFUNC && name.Type().NumTParams() > 0 {
- base.FatalfAt(name.Pos(), "Cannot export a generic function (yet): %v", name)
- }
+ // Generic functions can be marked for export here, even
+ // though they will not be compiled until instantiated.
typecheck.Export(name)
}
if base.Flag.AsmHdr != "" && !name.Sym().Asm() {
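In the object.go hunk above, anonymous ("") and blank ("_") result parameters are renamed to ~rN and ~bN, where N counts the result parameters declared so far rather than the length of the whole Dcl slice. A rough sketch of that numbering scheme on plain strings; nameResults is a hypothetical helper, not compiler code:

package main

import "fmt"

// nameResults mirrors the numbering above: "" (anonymous) becomes ~rN and
// "_" (blank) becomes ~bN, where N is the number of result parameters seen
// so far.
func nameResults(results []string) []string {
    named := make([]string, len(results))
    nresults := 0
    for i, r := range results {
        switch r {
        case "":
            named[i] = fmt.Sprintf("~r%d", nresults) // 'r' for "result"
        case "_":
            named[i] = fmt.Sprintf("~b%d", nresults) // 'b' for "blank"
        default:
            named[i] = r // already has a usable name
        }
        nresults++
    }
    return named
}

func main() {
    fmt.Println(nameResults([]string{"", "_", "err"})) // [~r0 ~b1 err]
}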
diff --git a/src/cmd/compile/internal/noder/posmap.go b/src/cmd/compile/internal/noder/posmap.go
index a6d3e2d7ef..f22628f845 100644
--- a/src/cmd/compile/internal/noder/posmap.go
+++ b/src/cmd/compile/internal/noder/posmap.go
@@ -45,8 +45,10 @@ func (m *posMap) makeSrcPosBase(b0 *syntax.PosBase) *src.PosBase {
b1, ok := m.bases[b0]
if !ok {
fn := b0.Filename()
+ absfn := trimFilename(b0)
+
if b0.IsFileBase() {
- b1 = src.NewFileBase(fn, absFilename(fn))
+ b1 = src.NewFileBase(fn, absfn)
} else {
// line directive base
p0 := b0.Pos()
@@ -55,7 +57,7 @@ func (m *posMap) makeSrcPosBase(b0 *syntax.PosBase) *src.PosBase {
panic("infinite recursion in makeSrcPosBase")
}
p1 := src.MakePos(m.makeSrcPosBase(p0b), p0.Line(), p0.Col())
- b1 = src.NewLinePragmaBase(p1, fn, fileh(fn), b0.Line(), b0.Col())
+ b1 = src.NewLinePragmaBase(p1, fn, absfn, b0.Line(), b0.Col())
}
if m.bases == nil {
m.bases = make(map[*syntax.PosBase]*src.PosBase)
diff --git a/src/cmd/compile/internal/noder/quirks.go b/src/cmd/compile/internal/noder/quirks.go
new file mode 100644
index 0000000000..914c5d2bd7
--- /dev/null
+++ b/src/cmd/compile/internal/noder/quirks.go
@@ -0,0 +1,450 @@
+// UNREVIEWED
+
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+import (
+ "fmt"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/syntax"
+ "cmd/compile/internal/types2"
+ "cmd/internal/src"
+)
+
+// This file defines helper functions useful for satisfying toolstash
+// -cmp when comparing against the legacy frontend's behavior; they can
+// be removed once that's no longer a concern.
+
+// quirksMode controls whether behavior specific to satisfying
+// toolstash -cmp is used.
+func quirksMode() bool {
+ return base.Debug.UnifiedQuirks != 0
+}
+
+// posBasesOf returns all of the position bases in the source files,
+// as seen in a straightforward traversal.
+//
+// This is necessary to ensure position bases (and thus file names)
+// get registered in the same order as noder would visit them.
+func posBasesOf(noders []*noder) []*syntax.PosBase {
+ seen := make(map[*syntax.PosBase]bool)
+ var bases []*syntax.PosBase
+
+ for _, p := range noders {
+ syntax.Crawl(p.file, func(n syntax.Node) bool {
+ if b := n.Pos().Base(); !seen[b] {
+ bases = append(bases, b)
+ seen[b] = true
+ }
+ return false
+ })
+ }
+
+ return bases
+}
+
+// importedObjsOf returns the imported objects (i.e., referenced
+// objects not declared by curpkg) from the parsed source files, in
+// the order that typecheck used to load their definitions.
+//
+// This is needed because loading the definitions for imported objects
+// can also add file names.
+func importedObjsOf(curpkg *types2.Package, info *types2.Info, noders []*noder) []types2.Object {
+ // This code is complex because it matches the precise order that
+ // typecheck recursively and repeatedly traverses the IR. It's meant
+ // to be thrown away eventually anyway.
+
+ seen := make(map[types2.Object]bool)
+ var objs []types2.Object
+
+ var phase int
+
+ decls := make(map[types2.Object]syntax.Decl)
+ assoc := func(decl syntax.Decl, names ...*syntax.Name) {
+ for _, name := range names {
+ obj, ok := info.Defs[name]
+ assert(ok)
+ decls[obj] = decl
+ }
+ }
+
+ for _, p := range noders {
+ syntax.Crawl(p.file, func(n syntax.Node) bool {
+ switch n := n.(type) {
+ case *syntax.ConstDecl:
+ assoc(n, n.NameList...)
+ case *syntax.FuncDecl:
+ assoc(n, n.Name)
+ case *syntax.TypeDecl:
+ assoc(n, n.Name)
+ case *syntax.VarDecl:
+ assoc(n, n.NameList...)
+ case *syntax.BlockStmt:
+ return true
+ }
+ return false
+ })
+ }
+
+ var visited map[syntax.Decl]bool
+
+ var resolveDecl func(n syntax.Decl)
+ var resolveNode func(n syntax.Node, top bool)
+
+ resolveDecl = func(n syntax.Decl) {
+ if visited[n] {
+ return
+ }
+ visited[n] = true
+
+ switch n := n.(type) {
+ case *syntax.ConstDecl:
+ resolveNode(n.Type, true)
+ resolveNode(n.Values, true)
+
+ case *syntax.FuncDecl:
+ if n.Recv != nil {
+ resolveNode(n.Recv, true)
+ }
+ resolveNode(n.Type, true)
+
+ case *syntax.TypeDecl:
+ resolveNode(n.Type, true)
+
+ case *syntax.VarDecl:
+ if n.Type != nil {
+ resolveNode(n.Type, true)
+ } else {
+ resolveNode(n.Values, true)
+ }
+ }
+ }
+
+ resolveObj := func(pos syntax.Pos, obj types2.Object) {
+ switch obj.Pkg() {
+ case nil:
+ // builtin; nothing to do
+
+ case curpkg:
+ if decl, ok := decls[obj]; ok {
+ resolveDecl(decl)
+ }
+
+ default:
+ if obj.Parent() == obj.Pkg().Scope() && !seen[obj] {
+ seen[obj] = true
+ objs = append(objs, obj)
+ }
+ }
+ }
+
+ checkdefat := func(pos syntax.Pos, n *syntax.Name) {
+ if n.Value == "_" {
+ return
+ }
+ obj, ok := info.Uses[n]
+ if !ok {
+ obj, ok = info.Defs[n]
+ if !ok {
+ return
+ }
+ }
+ if obj == nil {
+ return
+ }
+ resolveObj(pos, obj)
+ }
+ checkdef := func(n *syntax.Name) { checkdefat(n.Pos(), n) }
+
+ var later []syntax.Node
+
+ resolveNode = func(n syntax.Node, top bool) {
+ if n == nil {
+ return
+ }
+ syntax.Crawl(n, func(n syntax.Node) bool {
+ switch n := n.(type) {
+ case *syntax.Name:
+ checkdef(n)
+
+ case *syntax.SelectorExpr:
+ if name, ok := n.X.(*syntax.Name); ok {
+ if _, isPkg := info.Uses[name].(*types2.PkgName); isPkg {
+ checkdefat(n.X.Pos(), n.Sel)
+ return true
+ }
+ }
+
+ case *syntax.AssignStmt:
+ resolveNode(n.Rhs, top)
+ resolveNode(n.Lhs, top)
+ return true
+
+ case *syntax.VarDecl:
+ resolveNode(n.Values, top)
+
+ case *syntax.FuncLit:
+ if top {
+ resolveNode(n.Type, top)
+ later = append(later, n.Body)
+ return true
+ }
+
+ case *syntax.BlockStmt:
+ if phase >= 3 {
+ for _, stmt := range n.List {
+ resolveNode(stmt, false)
+ }
+ }
+ return true
+ }
+
+ return false
+ })
+ }
+
+ for phase = 1; phase <= 5; phase++ {
+ visited = map[syntax.Decl]bool{}
+
+ for _, p := range noders {
+ for _, decl := range p.file.DeclList {
+ switch decl := decl.(type) {
+ case *syntax.ConstDecl:
+ resolveDecl(decl)
+
+ case *syntax.FuncDecl:
+ resolveDecl(decl)
+ if phase >= 3 && decl.Body != nil {
+ resolveNode(decl.Body, true)
+ }
+
+ case *syntax.TypeDecl:
+ if !decl.Alias || phase >= 2 {
+ resolveDecl(decl)
+ }
+
+ case *syntax.VarDecl:
+ if phase >= 2 {
+ resolveNode(decl.Values, true)
+ resolveDecl(decl)
+ }
+ }
+ }
+
+ if phase >= 5 {
+ syntax.Crawl(p.file, func(n syntax.Node) bool {
+ if name, ok := n.(*syntax.Name); ok {
+ if obj, ok := info.Uses[name]; ok {
+ resolveObj(name.Pos(), obj)
+ }
+ }
+ return false
+ })
+ }
+ }
+
+ for i := 0; i < len(later); i++ {
+ resolveNode(later[i], true)
+ }
+ later = nil
+ }
+
+ return objs
+}
+
+// typeExprEndPos returns the position that noder would leave base.Pos
+// after parsing the given type expression.
+func typeExprEndPos(expr0 syntax.Expr) syntax.Pos {
+ for {
+ switch expr := expr0.(type) {
+ case *syntax.Name:
+ return expr.Pos()
+ case *syntax.SelectorExpr:
+ return expr.X.Pos()
+
+ case *syntax.ParenExpr:
+ expr0 = expr.X
+
+ case *syntax.Operation:
+ assert(expr.Op == syntax.Mul)
+ assert(expr.Y == nil)
+ expr0 = expr.X
+
+ case *syntax.ArrayType:
+ expr0 = expr.Elem
+ case *syntax.ChanType:
+ expr0 = expr.Elem
+ case *syntax.DotsType:
+ expr0 = expr.Elem
+ case *syntax.MapType:
+ expr0 = expr.Value
+ case *syntax.SliceType:
+ expr0 = expr.Elem
+
+ case *syntax.StructType:
+ return expr.Pos()
+
+ case *syntax.InterfaceType:
+ expr0 = lastFieldType(expr.MethodList)
+ if expr0 == nil {
+ return expr.Pos()
+ }
+
+ case *syntax.FuncType:
+ expr0 = lastFieldType(expr.ResultList)
+ if expr0 == nil {
+ expr0 = lastFieldType(expr.ParamList)
+ if expr0 == nil {
+ return expr.Pos()
+ }
+ }
+
+ case *syntax.IndexExpr: // explicit type instantiation
+ targs := unpackListExpr(expr.Index)
+ expr0 = targs[len(targs)-1]
+
+ default:
+ panic(fmt.Sprintf("%s: unexpected type expression %v", expr.Pos(), syntax.String(expr)))
+ }
+ }
+}
+
+func lastFieldType(fields []*syntax.Field) syntax.Expr {
+ if len(fields) == 0 {
+ return nil
+ }
+ return fields[len(fields)-1].Type
+}
+
+// sumPos returns the position that noder.sum would produce for
+// constant expression x.
+func sumPos(x syntax.Expr) syntax.Pos {
+ orig := x
+ for {
+ switch x1 := x.(type) {
+ case *syntax.BasicLit:
+ assert(x1.Kind == syntax.StringLit)
+ return x1.Pos()
+ case *syntax.Operation:
+ assert(x1.Op == syntax.Add && x1.Y != nil)
+ if r, ok := x1.Y.(*syntax.BasicLit); ok {
+ assert(r.Kind == syntax.StringLit)
+ x = x1.X
+ continue
+ }
+ }
+ return orig.Pos()
+ }
+}
+
+// funcParamsEndPos returns the value of base.Pos left by noder after
+// processing a function signature.
+func funcParamsEndPos(fn *ir.Func) src.XPos {
+ sig := fn.Nname.Type()
+
+ fields := sig.Results().FieldSlice()
+ if len(fields) == 0 {
+ fields = sig.Params().FieldSlice()
+ if len(fields) == 0 {
+ fields = sig.Recvs().FieldSlice()
+ if len(fields) == 0 {
+ if fn.OClosure != nil {
+ return fn.Nname.Ntype.Pos()
+ }
+ return fn.Pos()
+ }
+ }
+ }
+
+ return fields[len(fields)-1].Pos
+}
+
+type dupTypes struct {
+ origs map[types2.Type]types2.Type
+}
+
+func (d *dupTypes) orig(t types2.Type) types2.Type {
+ if orig, ok := d.origs[t]; ok {
+ return orig
+ }
+ return t
+}
+
+func (d *dupTypes) add(t, orig types2.Type) {
+ if t == orig {
+ return
+ }
+
+ if d.origs == nil {
+ d.origs = make(map[types2.Type]types2.Type)
+ }
+ assert(d.origs[t] == nil)
+ d.origs[t] = orig
+
+ switch t := t.(type) {
+ case *types2.Pointer:
+ orig := orig.(*types2.Pointer)
+ d.add(t.Elem(), orig.Elem())
+
+ case *types2.Slice:
+ orig := orig.(*types2.Slice)
+ d.add(t.Elem(), orig.Elem())
+
+ case *types2.Map:
+ orig := orig.(*types2.Map)
+ d.add(t.Key(), orig.Key())
+ d.add(t.Elem(), orig.Elem())
+
+ case *types2.Array:
+ orig := orig.(*types2.Array)
+ assert(t.Len() == orig.Len())
+ d.add(t.Elem(), orig.Elem())
+
+ case *types2.Chan:
+ orig := orig.(*types2.Chan)
+ assert(t.Dir() == orig.Dir())
+ d.add(t.Elem(), orig.Elem())
+
+ case *types2.Struct:
+ orig := orig.(*types2.Struct)
+ assert(t.NumFields() == orig.NumFields())
+ for i := 0; i < t.NumFields(); i++ {
+ d.add(t.Field(i).Type(), orig.Field(i).Type())
+ }
+
+ case *types2.Interface:
+ orig := orig.(*types2.Interface)
+ assert(t.NumExplicitMethods() == orig.NumExplicitMethods())
+ assert(t.NumEmbeddeds() == orig.NumEmbeddeds())
+ for i := 0; i < t.NumExplicitMethods(); i++ {
+ d.add(t.ExplicitMethod(i).Type(), orig.ExplicitMethod(i).Type())
+ }
+ for i := 0; i < t.NumEmbeddeds(); i++ {
+ d.add(t.EmbeddedType(i), orig.EmbeddedType(i))
+ }
+
+ case *types2.Signature:
+ orig := orig.(*types2.Signature)
+ assert((t.Recv() == nil) == (orig.Recv() == nil))
+ if t.Recv() != nil {
+ d.add(t.Recv().Type(), orig.Recv().Type())
+ }
+ d.add(t.Params(), orig.Params())
+ d.add(t.Results(), orig.Results())
+
+ case *types2.Tuple:
+ orig := orig.(*types2.Tuple)
+ assert(t.Len() == orig.Len())
+ for i := 0; i < t.Len(); i++ {
+ d.add(t.At(i).Type(), orig.At(i).Type())
+ }
+
+ default:
+ assert(types2.Identical(t, orig))
+ }
+}
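typeExprEndPos in quirks.go above works by repeatedly stepping into the syntactically last component of a type expression until it bottoms out at a leaf. The same "walk to the rightmost leaf" pattern on a toy tree; expr and rightmostLeaf are hypothetical, not cmd/compile/internal/syntax types:

package main

import "fmt"

// expr is a toy stand-in for a syntax tree node: either a leaf name or a
// composite whose last child determines where parsing "ends".
type expr struct {
    name     string
    children []*expr
}

// rightmostLeaf mirrors the loop in typeExprEndPos: keep stepping into the
// last child until only a leaf remains.
func rightmostLeaf(e *expr) *expr {
    for len(e.children) > 0 {
        e = e.children[len(e.children)-1]
    }
    return e
}

func main() {
    // Roughly "map[string][]chan int": the walk ends at "int".
    e := &expr{name: "map", children: []*expr{
        {name: "string"},
        {name: "slice", children: []*expr{
            {name: "chan", children: []*expr{{name: "int"}}},
        }},
    }}
    fmt.Println(rightmostLeaf(e).name) // int
}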
diff --git a/src/cmd/compile/internal/noder/reader.go b/src/cmd/compile/internal/noder/reader.go
new file mode 100644
index 0000000000..985453a1bb
--- /dev/null
+++ b/src/cmd/compile/internal/noder/reader.go
@@ -0,0 +1,2389 @@
+// UNREVIEWED
+
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+import (
+ "bytes"
+ "fmt"
+ "go/constant"
+ "strings"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/deadcode"
+ "cmd/compile/internal/dwarfgen"
+ "cmd/compile/internal/inline"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/reflectdata"
+ "cmd/compile/internal/typecheck"
+ "cmd/compile/internal/types"
+ "cmd/internal/obj"
+ "cmd/internal/src"
+)
+
+// TODO(mdempsky): Suppress duplicate type/const errors that can arise
+// during typecheck due to naive type substitution (e.g., see #42758).
+// I anticipate these will be handled as a consequence of adding
+// dictionaries support, so it's probably not important to focus on
+// this until after that's done.
+
+type pkgReader struct {
+ pkgDecoder
+
+ posBases []*src.PosBase
+ pkgs []*types.Pkg
+ typs []*types.Type
+
+ // offset for rewriting the given index into the output,
+ // but bitwise inverted so we can detect if we're missing the entry or not.
+ newindex []int
+}
+
+func newPkgReader(pr pkgDecoder) *pkgReader {
+ return &pkgReader{
+ pkgDecoder: pr,
+
+ posBases: make([]*src.PosBase, pr.numElems(relocPosBase)),
+ pkgs: make([]*types.Pkg, pr.numElems(relocPkg)),
+ typs: make([]*types.Type, pr.numElems(relocType)),
+
+ newindex: make([]int, pr.totalElems()),
+ }
+}
+
+type pkgReaderIndex struct {
+ pr *pkgReader
+ idx int
+ dict *readerDict
+}
+
+func (pri pkgReaderIndex) asReader(k reloc, marker syncMarker) *reader {
+ r := pri.pr.newReader(k, pri.idx, marker)
+ r.dict = pri.dict
+ return r
+}
+
+func (pr *pkgReader) newReader(k reloc, idx int, marker syncMarker) *reader {
+ return &reader{
+ decoder: pr.newDecoder(k, idx, marker),
+ p: pr,
+ }
+}
+
+type reader struct {
+ decoder
+
+ p *pkgReader
+
+ ext *reader
+
+ dict *readerDict
+
+ // TODO(mdempsky): The state below is all specific to reading
+ // function bodies. It probably makes sense to split it out
+ // separately so that it doesn't take up space in every reader
+ // instance.
+
+ curfn *ir.Func
+ locals []*ir.Name
+ closureVars []*ir.Name
+
+ funarghack bool
+
+ // scopeVars is a stack tracking the number of variables declared in
+ // the current function at the moment each open scope was opened.
+ scopeVars []int
+ marker dwarfgen.ScopeMarker
+ lastCloseScopePos src.XPos
+
+ // === details for handling inline body expansion ===
+
+ // If we're reading in a function body because of inlining, this is
+ // the call that we're inlining for.
+ inlCaller *ir.Func
+ inlCall *ir.CallExpr
+ inlFunc *ir.Func
+ inlTreeIndex int
+ inlPosBases map[*src.PosBase]*src.PosBase
+
+ delayResults bool
+
+ // Label to return to.
+ retlabel *types.Sym
+
+ inlvars, retvars ir.Nodes
+}
+
+type readerDict struct {
+ // targs holds the implicit and explicit type arguments in use for
+ // reading the current object. For example:
+ //
+ // func F[T any]() {
+ // type X[U any] struct { t T; u U }
+ // var _ X[string]
+ // }
+ //
+ // var _ = F[int]
+ //
+ // While instantiating F[int], we need to in turn instantiate
+ // X[string]. [int] and [string] are explicit type arguments for F
+ // and X, respectively; but [int] also serves as the implicit type
+ // arguments for X.
+ //
+ // (As an analogy to function literals, explicits are the function
+ // literal's formal parameters, while implicits are variables
+ // captured by the function literal.)
+ targs []*types.Type
+
+ // implicits counts how many of the types within targs are implicit type
+ // arguments; the rest are explicit.
+ implicits int
+
+ derived []derivedInfo // reloc index of the derived type's descriptor
+ derivedTypes []*types.Type // slice of previously computed derived types
+
+ funcs []objInfo
+ funcsObj []ir.Node
+}
+
+func (r *reader) setType(n ir.Node, typ *types.Type) {
+ n.SetType(typ)
+ n.SetTypecheck(1)
+
+ if name, ok := n.(*ir.Name); ok {
+ name.SetWalkdef(1)
+ name.Ntype = ir.TypeNode(name.Type())
+ }
+}
+
+func (r *reader) setValue(name *ir.Name, val constant.Value) {
+ name.SetVal(val)
+ name.Defn = nil
+}
+
+// @@@ Positions
+
+func (r *reader) pos() src.XPos {
+ return base.Ctxt.PosTable.XPos(r.pos0())
+}
+
+func (r *reader) pos0() src.Pos {
+ r.sync(syncPos)
+ if !r.bool() {
+ return src.NoPos
+ }
+
+ posBase := r.posBase()
+ line := r.uint()
+ col := r.uint()
+ return src.MakePos(posBase, line, col)
+}
+
+func (r *reader) posBase() *src.PosBase {
+ return r.inlPosBase(r.p.posBaseIdx(r.reloc(relocPosBase)))
+}
+
+func (pr *pkgReader) posBaseIdx(idx int) *src.PosBase {
+ if b := pr.posBases[idx]; b != nil {
+ return b
+ }
+
+ r := pr.newReader(relocPosBase, idx, syncPosBase)
+ var b *src.PosBase
+
+ filename := r.string()
+
+ if r.bool() {
+ b = src.NewFileBase(filename, filename)
+ } else {
+ pos := r.pos0()
+ line := r.uint()
+ col := r.uint()
+ b = src.NewLinePragmaBase(pos, filename, filename, line, col)
+ }
+
+ pr.posBases[idx] = b
+ return b
+}
+
+func (r *reader) inlPosBase(oldBase *src.PosBase) *src.PosBase {
+ if r.inlCall == nil {
+ return oldBase
+ }
+
+ if newBase, ok := r.inlPosBases[oldBase]; ok {
+ return newBase
+ }
+
+ newBase := src.NewInliningBase(oldBase, r.inlTreeIndex)
+ r.inlPosBases[oldBase] = newBase
+ return newBase
+}
+
+func (r *reader) updatePos(xpos src.XPos) src.XPos {
+ pos := base.Ctxt.PosTable.Pos(xpos)
+ pos.SetBase(r.inlPosBase(pos.Base()))
+ return base.Ctxt.PosTable.XPos(pos)
+}
+
+func (r *reader) origPos(xpos src.XPos) src.XPos {
+ if r.inlCall == nil {
+ return xpos
+ }
+
+ pos := base.Ctxt.PosTable.Pos(xpos)
+ for old, new := range r.inlPosBases {
+ if pos.Base() == new {
+ pos.SetBase(old)
+ return base.Ctxt.PosTable.XPos(pos)
+ }
+ }
+
+ base.FatalfAt(xpos, "pos base missing from inlPosBases")
+ panic("unreachable")
+}
+
+// @@@ Packages
+
+func (r *reader) pkg() *types.Pkg {
+ r.sync(syncPkg)
+ return r.p.pkgIdx(r.reloc(relocPkg))
+}
+
+func (pr *pkgReader) pkgIdx(idx int) *types.Pkg {
+ if pkg := pr.pkgs[idx]; pkg != nil {
+ return pkg
+ }
+
+ pkg := pr.newReader(relocPkg, idx, syncPkgDef).doPkg()
+ pr.pkgs[idx] = pkg
+ return pkg
+}
+
+func (r *reader) doPkg() *types.Pkg {
+ path := r.string()
+ if path == "builtin" {
+ return types.BuiltinPkg
+ }
+ if path == "" {
+ path = r.p.pkgPath
+ }
+
+ name := r.string()
+ height := r.len()
+
+ pkg := types.NewPkg(path, "")
+
+ if pkg.Name == "" {
+ pkg.Name = name
+ } else {
+ assert(pkg.Name == name)
+ }
+
+ if pkg.Height == 0 {
+ pkg.Height = height
+ } else {
+ assert(pkg.Height == height)
+ }
+
+ return pkg
+}
+
+// @@@ Types
+
+func (r *reader) typ() *types.Type {
+ return r.p.typIdx(r.typInfo(), r.dict)
+}
+
+func (r *reader) typInfo() typeInfo {
+ r.sync(syncType)
+ if r.bool() {
+ return typeInfo{idx: r.len(), derived: true}
+ }
+ return typeInfo{idx: r.reloc(relocType), derived: false}
+}
+
+func (pr *pkgReader) typIdx(info typeInfo, dict *readerDict) *types.Type {
+ idx := info.idx
+ var where **types.Type
+ if info.derived {
+ where = &dict.derivedTypes[idx]
+ idx = dict.derived[idx].idx
+ } else {
+ where = &pr.typs[idx]
+ }
+
+ if typ := *where; typ != nil {
+ return typ
+ }
+
+ r := pr.newReader(relocType, idx, syncTypeIdx)
+ r.dict = dict
+
+ typ := r.doTyp()
+ assert(typ != nil)
+
+ // For recursive type declarations involving interfaces and aliases,
+ // the r.doTyp() call above may have already set pr.typs[idx], so
+ // just double-check and return the type.
+ //
+ // Example:
+ //
+ // type F = func(I)
+ //
+ // type I interface {
+ // m(F)
+ // }
+ //
+ // The writer writes data types in the following index order:
+ //
+ // 0: func(I)
+ // 1: I
+ // 2: interface{m(func(I))}
+ //
+ // The reader resolves it in the following index order:
+ //
+ // 0 -> 1 -> 2 -> 0 -> 1
+ //
+ // which logically divides into 2 steps:
+ //
+ // - 0 -> 1 : the first time the reader reaches type I,
+ // it creates a new named type with symbol I.
+ //
+ // - 2 -> 0 -> 1: the reader ends up reaching symbol I again;
+ // since symbol I was set up in the step above,
+ // the reader just returns the named type.
+ //
+ // Now, as the calls return, pr.typs looks like this:
+ //
+ // - 0 -> 1 -> 2 -> 0 : [<T> I <T>]
+ // - 0 -> 1 -> 2 : [func(I) I <T>]
+ // - 0 -> 1 : [func(I) I interface { "".m(func("".I)) }]
+ //
+ // Idx 1, corresponding to type I, was resolved successfully
+ // after the r.doTyp() call.
+
+ if prev := *where; prev != nil {
+ return prev
+ }
+
+ *where = typ
+
+ if !typ.IsUntyped() {
+ types.CheckSize(typ)
+ }
+
+ return typ
+}
+
+func (r *reader) doTyp() *types.Type {
+ switch tag := codeType(r.code(syncType)); tag {
+ default:
+ panic(fmt.Sprintf("unexpected type: %v", tag))
+
+ case typeBasic:
+ return *basics[r.len()]
+
+ case typeNamed:
+ obj := r.obj()
+ assert(obj.Op() == ir.OTYPE)
+ return obj.Type()
+
+ case typeTypeParam:
+ return r.dict.targs[r.len()]
+
+ case typeArray:
+ len := int64(r.uint64())
+ return types.NewArray(r.typ(), len)
+ case typeChan:
+ dir := dirs[r.len()]
+ return types.NewChan(r.typ(), dir)
+ case typeMap:
+ return types.NewMap(r.typ(), r.typ())
+ case typePointer:
+ return types.NewPtr(r.typ())
+ case typeSignature:
+ return r.signature(types.LocalPkg, nil)
+ case typeSlice:
+ return types.NewSlice(r.typ())
+ case typeStruct:
+ return r.structType()
+ case typeInterface:
+ return r.interfaceType()
+ }
+}
+
+func (r *reader) interfaceType() *types.Type {
+ tpkg := types.LocalPkg // TODO(mdempsky): Remove after iexport is gone.
+
+ nmethods, nembeddeds := r.len(), r.len()
+
+ fields := make([]*types.Field, nmethods+nembeddeds)
+ methods, embeddeds := fields[:nmethods], fields[nmethods:]
+
+ for i := range methods {
+ pos := r.pos()
+ pkg, sym := r.selector()
+ tpkg = pkg
+ mtyp := r.signature(pkg, typecheck.FakeRecv())
+ methods[i] = types.NewField(pos, sym, mtyp)
+ }
+ for i := range embeddeds {
+ embeddeds[i] = types.NewField(src.NoXPos, nil, r.typ())
+ }
+
+ if len(fields) == 0 {
+ return types.Types[types.TINTER] // empty interface
+ }
+ return r.needWrapper(types.NewInterface(tpkg, fields))
+}
+
+func (r *reader) structType() *types.Type {
+ tpkg := types.LocalPkg // TODO(mdempsky): Remove after iexport is gone.
+ fields := make([]*types.Field, r.len())
+ for i := range fields {
+ pos := r.pos()
+ pkg, sym := r.selector()
+ tpkg = pkg
+ ftyp := r.typ()
+ tag := r.string()
+ embedded := r.bool()
+
+ f := types.NewField(pos, sym, ftyp)
+ f.Note = tag
+ if embedded {
+ f.Embedded = 1
+ }
+ fields[i] = f
+ }
+ return r.needWrapper(types.NewStruct(tpkg, fields))
+}
+
+func (r *reader) signature(tpkg *types.Pkg, recv *types.Field) *types.Type {
+ r.sync(syncSignature)
+
+ params := r.params(&tpkg)
+ results := r.params(&tpkg)
+ if r.bool() { // variadic
+ params[len(params)-1].SetIsDDD(true)
+ }
+
+ return types.NewSignature(tpkg, recv, nil, params, results)
+}
+
+func (r *reader) params(tpkg **types.Pkg) []*types.Field {
+ r.sync(syncParams)
+ fields := make([]*types.Field, r.len())
+ for i := range fields {
+ *tpkg, fields[i] = r.param()
+ }
+ return fields
+}
+
+func (r *reader) param() (*types.Pkg, *types.Field) {
+ r.sync(syncParam)
+
+ pos := r.pos()
+ pkg, sym := r.localIdent()
+ typ := r.typ()
+
+ return pkg, types.NewField(pos, sym, typ)
+}
+
+// @@@ Objects
+
+var objReader = map[*types.Sym]pkgReaderIndex{}
+
+func (r *reader) obj() ir.Node {
+ r.sync(syncObject)
+
+ if r.bool() {
+ idx := r.len()
+ obj := r.dict.funcsObj[idx]
+ if obj == nil {
+ fn := r.dict.funcs[idx]
+ targs := make([]*types.Type, len(fn.explicits))
+ for i, targ := range fn.explicits {
+ targs[i] = r.p.typIdx(targ, r.dict)
+ }
+
+ obj = r.p.objIdx(fn.idx, nil, targs)
+ assert(r.dict.funcsObj[idx] == nil)
+ r.dict.funcsObj[idx] = obj
+ }
+ return obj
+ }
+
+ idx := r.reloc(relocObj)
+
+ explicits := make([]*types.Type, r.len())
+ for i := range explicits {
+ explicits[i] = r.typ()
+ }
+
+ var implicits []*types.Type
+ if r.dict != nil {
+ implicits = r.dict.targs
+ }
+
+ return r.p.objIdx(idx, implicits, explicits)
+}
+
+func (pr *pkgReader) objIdx(idx int, implicits, explicits []*types.Type) ir.Node {
+ rname := pr.newReader(relocName, idx, syncObject1)
+ _, sym := rname.qualifiedIdent()
+ tag := codeObj(rname.code(syncCodeObj))
+
+ if tag == objStub {
+ assert(!sym.IsBlank())
+ switch sym.Pkg {
+ case types.BuiltinPkg, ir.Pkgs.Unsafe:
+ return sym.Def.(ir.Node)
+ }
+ if pri, ok := objReader[sym]; ok {
+ return pri.pr.objIdx(pri.idx, nil, explicits)
+ }
+ if haveLegacyImports {
+ assert(len(explicits) == 0)
+ return typecheck.Resolve(ir.NewIdent(src.NoXPos, sym))
+ }
+ base.Fatalf("unresolved stub: %v", sym)
+ }
+
+ dict := pr.objDictIdx(sym, idx, implicits, explicits)
+
+ r := pr.newReader(relocObj, idx, syncObject1)
+ r.ext = pr.newReader(relocObjExt, idx, syncObject1)
+
+ r.dict = dict
+ r.ext.dict = dict
+
+ sym = r.mangle(sym)
+ if !sym.IsBlank() && sym.Def != nil {
+ return sym.Def.(*ir.Name)
+ }
+
+ do := func(op ir.Op, hasTParams bool) *ir.Name {
+ pos := r.pos()
+ if hasTParams {
+ r.typeParamNames()
+ }
+
+ name := ir.NewDeclNameAt(pos, op, sym)
+ name.Class = ir.PEXTERN // may be overridden later
+ if !sym.IsBlank() {
+ if sym.Def != nil {
+ base.FatalfAt(name.Pos(), "already have a definition for %v", name)
+ }
+ assert(sym.Def == nil)
+ sym.Def = name
+ }
+ return name
+ }
+
+ switch tag {
+ default:
+ panic("unexpected object")
+
+ case objAlias:
+ name := do(ir.OTYPE, false)
+ r.setType(name, r.typ())
+ name.SetAlias(true)
+ return name
+
+ case objConst:
+ name := do(ir.OLITERAL, false)
+ typ, val := r.value()
+ r.setType(name, typ)
+ r.setValue(name, val)
+ return name
+
+ case objFunc:
+ if sym.Name == "init" {
+ sym = renameinit()
+ }
+ name := do(ir.ONAME, true)
+ r.setType(name, r.signature(sym.Pkg, nil))
+
+ name.Func = ir.NewFunc(r.pos())
+ name.Func.Nname = name
+
+ r.ext.funcExt(name)
+ return name
+
+ case objType:
+ name := do(ir.OTYPE, true)
+ typ := types.NewNamed(name)
+ r.setType(name, typ)
+
+ // Important: We need to do this before SetUnderlying.
+ r.ext.typeExt(name)
+
+ // We need to defer CheckSize until we've called SetUnderlying to
+ // handle recursive types.
+ types.DeferCheckSize()
+ typ.SetUnderlying(r.typ())
+ types.ResumeCheckSize()
+
+ methods := make([]*types.Field, r.len())
+ for i := range methods {
+ methods[i] = r.method()
+ }
+ if len(methods) != 0 {
+ typ.Methods().Set(methods)
+ }
+
+ if !typ.IsPtr() {
+ r.needWrapper(typ)
+ }
+
+ return name
+
+ case objVar:
+ name := do(ir.ONAME, false)
+ r.setType(name, r.typ())
+ r.ext.varExt(name)
+ return name
+ }
+}
+
+func (r *reader) mangle(sym *types.Sym) *types.Sym {
+ if !r.hasTypeParams() {
+ return sym
+ }
+
+ var buf bytes.Buffer
+ buf.WriteString(sym.Name)
+ buf.WriteByte('[')
+ for i, targ := range r.dict.targs {
+ if i > 0 {
+ if i == r.dict.implicits {
+ buf.WriteByte(';')
+ } else {
+ buf.WriteByte(',')
+ }
+ }
+ buf.WriteString(targ.LinkString())
+ }
+ buf.WriteByte(']')
+ return sym.Pkg.Lookup(buf.String())
+}
+
+func (pr *pkgReader) objDictIdx(sym *types.Sym, idx int, implicits, explicits []*types.Type) *readerDict {
+ r := pr.newReader(relocObjDict, idx, syncObject1)
+
+ var dict readerDict
+
+ nimplicits := r.len()
+ nexplicits := r.len()
+
+ if nimplicits > len(implicits) || nexplicits != len(explicits) {
+ base.Fatalf("%v has %v+%v params, but instantiated with %v+%v args", sym, nimplicits, nexplicits, len(implicits), len(explicits))
+ }
+
+ dict.targs = append(implicits[:nimplicits:nimplicits], explicits...)
+ dict.implicits = nimplicits
+
+ // For stenciling, we can just skip over the type parameters.
+ for range dict.targs[dict.implicits:] {
+ // Skip past bounds without actually evaluating them.
+ r.sync(syncType)
+ if r.bool() {
+ r.len()
+ } else {
+ r.reloc(relocType)
+ }
+ }
+
+ dict.derived = make([]derivedInfo, r.len())
+ dict.derivedTypes = make([]*types.Type, len(dict.derived))
+ for i := range dict.derived {
+ dict.derived[i] = derivedInfo{r.reloc(relocType), r.bool()}
+ }
+
+ dict.funcs = make([]objInfo, r.len())
+ dict.funcsObj = make([]ir.Node, len(dict.funcs))
+ for i := range dict.funcs {
+ objIdx := r.reloc(relocObj)
+ targs := make([]typeInfo, r.len())
+ for j := range targs {
+ targs[j] = r.typInfo()
+ }
+ dict.funcs[i] = objInfo{idx: objIdx, explicits: targs}
+ }
+
+ return &dict
+}
+
+func (r *reader) typeParamNames() {
+ r.sync(syncTypeParamNames)
+
+ for range r.dict.targs[r.dict.implicits:] {
+ r.pos()
+ r.localIdent()
+ }
+}
+
+func (r *reader) value() (*types.Type, constant.Value) {
+ r.sync(syncValue)
+ typ := r.typ()
+ return typ, FixValue(typ, r.rawValue())
+}
+
+func (r *reader) method() *types.Field {
+ r.sync(syncMethod)
+ pos := r.pos()
+ pkg, sym := r.selector()
+ r.typeParamNames()
+ _, recv := r.param()
+ typ := r.signature(pkg, recv)
+
+ fnsym := sym
+ fnsym = ir.MethodSym(recv.Type, fnsym)
+ name := ir.NewNameAt(pos, fnsym)
+ r.setType(name, typ)
+
+ name.Func = ir.NewFunc(r.pos())
+ name.Func.Nname = name
+
+ r.ext.funcExt(name)
+
+ meth := types.NewField(name.Func.Pos(), sym, typ)
+ meth.Nname = name
+ meth.SetNointerface(name.Func.Pragma&ir.Nointerface != 0)
+
+ return meth
+}
+
+func (r *reader) qualifiedIdent() (pkg *types.Pkg, sym *types.Sym) {
+ r.sync(syncSym)
+ pkg = r.pkg()
+ if name := r.string(); name != "" {
+ sym = pkg.Lookup(name)
+ }
+ return
+}
+
+func (r *reader) localIdent() (pkg *types.Pkg, sym *types.Sym) {
+ r.sync(syncLocalIdent)
+ pkg = r.pkg()
+ if name := r.string(); name != "" {
+ sym = pkg.Lookup(name)
+ }
+ return
+}
+
+func (r *reader) selector() (origPkg *types.Pkg, sym *types.Sym) {
+ r.sync(syncSelector)
+ origPkg = r.pkg()
+ name := r.string()
+ pkg := origPkg
+ if types.IsExported(name) {
+ pkg = types.LocalPkg
+ }
+ sym = pkg.Lookup(name)
+ return
+}
+
+func (r *reader) hasTypeParams() bool {
+ return r.dict.hasTypeParams()
+}
+
+func (dict *readerDict) hasTypeParams() bool {
+ return dict != nil && len(dict.targs) != 0
+}
+
+// @@@ Compiler extensions
+
+func (r *reader) funcExt(name *ir.Name) {
+ r.sync(syncFuncExt)
+
+ name.Class = 0 // so MarkFunc doesn't complain
+ ir.MarkFunc(name)
+
+ fn := name.Func
+
+ // XXX: Workaround because linker doesn't know how to copy Pos.
+ if !fn.Pos().IsKnown() {
+ fn.SetPos(name.Pos())
+ }
+
+ // Normally, we only compile local functions, which saves redundant compilation work.
+ // n.Defn is not nil for local functions, and is nil for imported functions. But for
+ // generic functions, we might have an instantiation that no other package has seen before.
+ // So we need to be conservative and compile it again.
+ //
+ // That's why name.Defn is set here, so ir.VisitFuncsBottomUp can analyze the function.
+ // TODO(mdempsky,cuonglm): find a cleaner way to handle this.
+ if name.Sym().Pkg == types.LocalPkg || r.hasTypeParams() {
+ name.Defn = fn
+ }
+
+ fn.Pragma = r.pragmaFlag()
+ r.linkname(name)
+
+ typecheck.Func(fn)
+
+ if r.bool() {
+ fn.ABI = obj.ABI(r.uint64())
+
+ // Escape analysis.
+ for _, fs := range &types.RecvsParams {
+ for _, f := range fs(name.Type()).FieldSlice() {
+ f.Note = r.string()
+ }
+ }
+
+ if r.bool() {
+ fn.Inl = &ir.Inline{
+ Cost: int32(r.len()),
+ CanDelayResults: r.bool(),
+ }
+ r.addBody(name.Func)
+ }
+ } else {
+ r.addBody(name.Func)
+ }
+ r.sync(syncEOF)
+}
+
+func (r *reader) typeExt(name *ir.Name) {
+ r.sync(syncTypeExt)
+
+ typ := name.Type()
+
+ if r.hasTypeParams() {
+ // Set "RParams" (really type arguments here, not parameters) so
+ // this type is treated as "fully instantiated". This ensures the
+ // type descriptor is written out as DUPOK and method wrappers are
+ // generated even for imported types.
+ var targs []*types.Type
+ targs = append(targs, r.dict.targs...)
+ typ.SetRParams(targs)
+ }
+
+ name.SetPragma(r.pragmaFlag())
+ if name.Pragma()&ir.NotInHeap != 0 {
+ typ.SetNotInHeap(true)
+ }
+
+ typecheck.SetBaseTypeIndex(typ, r.int64(), r.int64())
+}
+
+func (r *reader) varExt(name *ir.Name) {
+ r.sync(syncVarExt)
+ r.linkname(name)
+}
+
+func (r *reader) linkname(name *ir.Name) {
+ assert(name.Op() == ir.ONAME)
+ r.sync(syncLinkname)
+
+ if idx := r.int64(); idx >= 0 {
+ lsym := name.Linksym()
+ lsym.SymIdx = int32(idx)
+ lsym.Set(obj.AttrIndexed, true)
+ } else {
+ name.Sym().Linkname = r.string()
+ }
+}
+
+func (r *reader) pragmaFlag() ir.PragmaFlag {
+ r.sync(syncPragma)
+ return ir.PragmaFlag(r.int())
+}
+
+// @@@ Function bodies
+
+// bodyReader tracks where the serialized IR for a function's body can
+// be found.
+var bodyReader = map[*ir.Func]pkgReaderIndex{}
+
+// todoBodies holds the list of function bodies that still need to be
+// constructed.
+var todoBodies []*ir.Func
+
+func (r *reader) addBody(fn *ir.Func) {
+ pri := pkgReaderIndex{r.p, r.reloc(relocBody), r.dict}
+ bodyReader[fn] = pri
+
+ if r.curfn == nil {
+ todoBodies = append(todoBodies, fn)
+ return
+ }
+
+ pri.funcBody(fn)
+}
+
+func (pri pkgReaderIndex) funcBody(fn *ir.Func) {
+ r := pri.asReader(relocBody, syncFuncBody)
+ r.funcBody(fn)
+}
+
+func (r *reader) funcBody(fn *ir.Func) {
+ r.curfn = fn
+ r.closureVars = fn.ClosureVars
+
+ ir.WithFunc(fn, func() {
+ r.funcargs(fn)
+
+ if !r.bool() {
+ return
+ }
+
+ body := r.stmts()
+ if body == nil {
+ pos := src.NoXPos
+ if quirksMode() {
+ pos = funcParamsEndPos(fn)
+ }
+ body = []ir.Node{typecheck.Stmt(ir.NewBlockStmt(pos, nil))}
+ }
+ fn.Body = body
+ fn.Endlineno = r.pos()
+ })
+
+ r.marker.WriteTo(fn)
+}
+
+func (r *reader) funcargs(fn *ir.Func) {
+ sig := fn.Nname.Type()
+
+ if recv := sig.Recv(); recv != nil {
+ r.funcarg(recv, recv.Sym, ir.PPARAM)
+ }
+ for _, param := range sig.Params().FieldSlice() {
+ r.funcarg(param, param.Sym, ir.PPARAM)
+ }
+
+ for i, param := range sig.Results().FieldSlice() {
+ sym := types.OrigSym(param.Sym)
+
+ if sym == nil || sym.IsBlank() {
+ prefix := "~r"
+ if r.inlCall != nil {
+ prefix = "~R"
+ } else if sym != nil {
+ prefix = "~b"
+ }
+ sym = typecheck.LookupNum(prefix, i)
+ }
+
+ r.funcarg(param, sym, ir.PPARAMOUT)
+ }
+}
+
+func (r *reader) funcarg(param *types.Field, sym *types.Sym, ctxt ir.Class) {
+ if sym == nil {
+ assert(ctxt == ir.PPARAM)
+ if r.inlCall != nil {
+ r.inlvars.Append(ir.BlankNode)
+ }
+ return
+ }
+
+ name := ir.NewNameAt(r.updatePos(param.Pos), sym)
+ r.setType(name, param.Type)
+ r.addLocal(name, ctxt)
+
+ if r.inlCall == nil {
+ if !r.funarghack {
+ param.Sym = sym
+ param.Nname = name
+ }
+ } else {
+ if ctxt == ir.PPARAMOUT {
+ r.retvars.Append(name)
+ } else {
+ r.inlvars.Append(name)
+ }
+ }
+}
+
+func (r *reader) addLocal(name *ir.Name, ctxt ir.Class) {
+ assert(ctxt == ir.PAUTO || ctxt == ir.PPARAM || ctxt == ir.PPARAMOUT)
+
+ r.sync(syncAddLocal)
+ if enableSync {
+ want := r.int()
+ if have := len(r.locals); have != want {
+ base.FatalfAt(name.Pos(), "locals table has desynced")
+ }
+ }
+
+ name.SetUsed(true)
+ r.locals = append(r.locals, name)
+
+ // TODO(mdempsky): Move earlier.
+ if ir.IsBlank(name) {
+ return
+ }
+
+ if r.inlCall != nil {
+ if ctxt == ir.PAUTO {
+ name.SetInlLocal(true)
+ } else {
+ name.SetInlFormal(true)
+ ctxt = ir.PAUTO
+ }
+
+ // TODO(mdempsky): Rethink this hack.
+ if strings.HasPrefix(name.Sym().Name, "~") || base.Flag.GenDwarfInl == 0 {
+ name.SetPos(r.inlCall.Pos())
+ name.SetInlFormal(false)
+ name.SetInlLocal(false)
+ }
+ }
+
+ name.Class = ctxt
+ name.Curfn = r.curfn
+
+ r.curfn.Dcl = append(r.curfn.Dcl, name)
+
+ if ctxt == ir.PAUTO {
+ name.SetFrameOffset(0)
+ }
+}
+
+func (r *reader) useLocal() *ir.Name {
+ r.sync(syncUseObjLocal)
+ if r.bool() {
+ return r.locals[r.len()]
+ }
+ return r.closureVars[r.len()]
+}
+
+func (r *reader) openScope() {
+ r.sync(syncOpenScope)
+ pos := r.pos()
+
+ if base.Flag.Dwarf {
+ r.scopeVars = append(r.scopeVars, len(r.curfn.Dcl))
+ r.marker.Push(pos)
+ }
+}
+
+func (r *reader) closeScope() {
+ r.sync(syncCloseScope)
+ r.lastCloseScopePos = r.pos()
+
+ r.closeAnotherScope()
+}
+
+// closeAnotherScope is like closeScope, but it reuses the same mark
+// position as the last closeScope call. This is useful for "for" and
+// "if" statements, as their implicit blocks always end at the same
+// position as an explicit block.
+func (r *reader) closeAnotherScope() {
+ r.sync(syncCloseAnotherScope)
+
+ if base.Flag.Dwarf {
+ scopeVars := r.scopeVars[len(r.scopeVars)-1]
+ r.scopeVars = r.scopeVars[:len(r.scopeVars)-1]
+
+ // Quirkish: noder decides which scopes to keep before
+ // typechecking, whereas incremental typechecking during IR
+ // construction can result in new autotemps being allocated. To
+ // produce identical output, we ignore autotemps here for the
+ // purpose of deciding whether to retract the scope.
+ //
+ // This is important for net/http/fcgi, because it contains:
+ //
+ // var body io.ReadCloser
+ // if len(content) > 0 {
+ // body, req.pw = io.Pipe()
+ // } else { … }
+ //
+ // Notably, io.Pipe is inlinable, and inlining it introduces a ~R0
+ // variable at the call site.
+ //
+ // Noder does not preserve the scope where the io.Pipe() call
+ // resides, because it doesn't contain any declared variables in
+ // source. So the ~R0 variable ends up being assigned to the
+ // enclosing scope instead.
+ //
+ // However, typechecking this assignment also introduces
+ // autotemps, because io.Pipe's results need conversion before
+ // they can be assigned to their respective destination variables.
+ //
+ // TODO(mdempsky): We should probably just keep all scopes, and
+ // let dwarfgen take care of pruning them instead.
+ retract := true
+ for _, n := range r.curfn.Dcl[scopeVars:] {
+ if !n.AutoTemp() {
+ retract = false
+ break
+ }
+ }
+
+ if retract {
+ // no variables were declared in this scope, so we can retract it.
+ r.marker.Unpush()
+ } else {
+ r.marker.Pop(r.lastCloseScopePos)
+ }
+ }
+}
+
+// @@@ Statements
+
+func (r *reader) stmt() ir.Node {
+ switch stmts := r.stmts(); len(stmts) {
+ case 0:
+ return nil
+ case 1:
+ return stmts[0]
+ default:
+ return ir.NewBlockStmt(stmts[0].Pos(), stmts)
+ }
+}
+
+func (r *reader) stmts() []ir.Node {
+ assert(ir.CurFunc == r.curfn)
+ var res ir.Nodes
+
+ r.sync(syncStmts)
+ for {
+ tag := codeStmt(r.code(syncStmt1))
+ if tag == stmtEnd {
+ r.sync(syncStmtsEnd)
+ return res
+ }
+
+ if n := r.stmt1(tag, &res); n != nil {
+ res.Append(typecheck.Stmt(n))
+ }
+ }
+}
+
+func (r *reader) stmt1(tag codeStmt, out *ir.Nodes) ir.Node {
+ var label *types.Sym
+ if n := len(*out); n > 0 {
+ if ls, ok := (*out)[n-1].(*ir.LabelStmt); ok {
+ label = ls.Label
+ }
+ }
+
+ switch tag {
+ default:
+ panic("unexpected statement")
+
+ case stmtAssign:
+ pos := r.pos()
+
+ // TODO(mdempsky): After quirks mode is gone, swap these
+ // statements so we visit LHS before RHS again.
+ rhs := r.exprList()
+ names, lhs := r.assignList()
+
+ if len(rhs) == 0 {
+ for _, name := range names {
+ as := ir.NewAssignStmt(pos, name, nil)
+ as.PtrInit().Append(ir.NewDecl(pos, ir.ODCL, name))
+ out.Append(typecheck.Stmt(as))
+ }
+ return nil
+ }
+
+ if len(lhs) == 1 && len(rhs) == 1 {
+ n := ir.NewAssignStmt(pos, lhs[0], rhs[0])
+ n.Def = r.initDefn(n, names)
+ return n
+ }
+
+ n := ir.NewAssignListStmt(pos, ir.OAS2, lhs, rhs)
+ n.Def = r.initDefn(n, names)
+ return n
+
+ case stmtAssignOp:
+ op := r.op()
+ lhs := r.expr()
+ pos := r.pos()
+ rhs := r.expr()
+ return ir.NewAssignOpStmt(pos, op, lhs, rhs)
+
+ case stmtIncDec:
+ op := r.op()
+ lhs := r.expr()
+ pos := r.pos()
+ n := ir.NewAssignOpStmt(pos, op, lhs, ir.NewBasicLit(pos, one))
+ n.IncDec = true
+ return n
+
+ case stmtBlock:
+ out.Append(r.blockStmt()...)
+ return nil
+
+ case stmtBranch:
+ pos := r.pos()
+ op := r.op()
+ sym := r.optLabel()
+ return ir.NewBranchStmt(pos, op, sym)
+
+ case stmtCall:
+ pos := r.pos()
+ op := r.op()
+ call := r.expr()
+ return ir.NewGoDeferStmt(pos, op, call)
+
+ case stmtExpr:
+ return r.expr()
+
+ case stmtFor:
+ return r.forStmt(label)
+
+ case stmtIf:
+ return r.ifStmt()
+
+ case stmtLabel:
+ pos := r.pos()
+ sym := r.label()
+ return ir.NewLabelStmt(pos, sym)
+
+ case stmtReturn:
+ pos := r.pos()
+ results := r.exprList()
+ return ir.NewReturnStmt(pos, results)
+
+ case stmtSelect:
+ return r.selectStmt(label)
+
+ case stmtSend:
+ pos := r.pos()
+ ch := r.expr()
+ value := r.expr()
+ return ir.NewSendStmt(pos, ch, value)
+
+ case stmtSwitch:
+ return r.switchStmt(label)
+
+ case stmtTypeDeclHack:
+ // fake "type _ = int" declaration to prevent inlining in quirks mode.
+ assert(quirksMode())
+
+ name := ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, ir.BlankNode.Sym())
+ name.SetAlias(true)
+ r.setType(name, types.Types[types.TINT])
+
+ n := ir.NewDecl(src.NoXPos, ir.ODCLTYPE, name)
+ n.SetTypecheck(1)
+ return n
+ }
+}
+
+func (r *reader) assignList() ([]*ir.Name, []ir.Node) {
+ lhs := make([]ir.Node, r.len())
+ var names []*ir.Name
+
+ for i := range lhs {
+ if r.bool() {
+ pos := r.pos()
+ _, sym := r.localIdent()
+ typ := r.typ()
+
+ name := ir.NewNameAt(pos, sym)
+ lhs[i] = name
+ names = append(names, name)
+ r.setType(name, typ)
+ r.addLocal(name, ir.PAUTO)
+ continue
+ }
+
+ lhs[i] = r.expr()
+ }
+
+ return names, lhs
+}
+
+func (r *reader) blockStmt() []ir.Node {
+ r.sync(syncBlockStmt)
+ r.openScope()
+ stmts := r.stmts()
+ r.closeScope()
+ return stmts
+}
+
+func (r *reader) forStmt(label *types.Sym) ir.Node {
+ r.sync(syncForStmt)
+
+ r.openScope()
+
+ if r.bool() {
+ pos := r.pos()
+
+ // TODO(mdempsky): After quirks mode is gone, swap these
+ // statements so we read LHS before X again.
+ x := r.expr()
+ names, lhs := r.assignList()
+
+ body := r.blockStmt()
+ r.closeAnotherScope()
+
+ rang := ir.NewRangeStmt(pos, nil, nil, x, body)
+ if len(lhs) >= 1 {
+ rang.Key = lhs[0]
+ if len(lhs) >= 2 {
+ rang.Value = lhs[1]
+ }
+ }
+ rang.Def = r.initDefn(rang, names)
+ rang.Label = label
+ return rang
+ }
+
+ pos := r.pos()
+ init := r.stmt()
+ cond := r.expr()
+ post := r.stmt()
+ body := r.blockStmt()
+ r.closeAnotherScope()
+
+ stmt := ir.NewForStmt(pos, init, cond, post, body)
+ stmt.Label = label
+ return stmt
+}
+
+func (r *reader) ifStmt() ir.Node {
+ r.sync(syncIfStmt)
+ r.openScope()
+ pos := r.pos()
+ init := r.stmts()
+ cond := r.expr()
+ then := r.blockStmt()
+ els := r.stmts()
+ n := ir.NewIfStmt(pos, cond, then, els)
+ n.SetInit(init)
+ r.closeAnotherScope()
+ return n
+}
+
+func (r *reader) selectStmt(label *types.Sym) ir.Node {
+ r.sync(syncSelectStmt)
+
+ pos := r.pos()
+ clauses := make([]*ir.CommClause, r.len())
+ for i := range clauses {
+ if i > 0 {
+ r.closeScope()
+ }
+ r.openScope()
+
+ pos := r.pos()
+ comm := r.stmt()
+ body := r.stmts()
+
+ clauses[i] = ir.NewCommStmt(pos, comm, body)
+ }
+ if len(clauses) > 0 {
+ r.closeScope()
+ }
+ n := ir.NewSelectStmt(pos, clauses)
+ n.Label = label
+ return n
+}
+
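+// switchStmt reads an expression or type switch. For a type switch
+// that declares a variable (e.g., "switch x := v.(type)"), each case
+// clause additionally declares its own version of that variable,
+// using the type recorded for that case.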
+func (r *reader) switchStmt(label *types.Sym) ir.Node {
+ r.sync(syncSwitchStmt)
+
+ r.openScope()
+ pos := r.pos()
+ init := r.stmt()
+
+ var tag ir.Node
+ if r.bool() {
+ pos := r.pos()
+ var ident *ir.Ident
+ if r.bool() {
+ pos := r.pos()
+ sym := typecheck.Lookup(r.string())
+ ident = ir.NewIdent(pos, sym)
+ }
+ x := r.expr()
+ tag = ir.NewTypeSwitchGuard(pos, ident, x)
+ } else {
+ tag = r.expr()
+ }
+
+ tswitch, ok := tag.(*ir.TypeSwitchGuard)
+ if ok && tswitch.Tag == nil {
+ tswitch = nil
+ }
+
+ clauses := make([]*ir.CaseClause, r.len())
+ for i := range clauses {
+ if i > 0 {
+ r.closeScope()
+ }
+ r.openScope()
+
+ pos := r.pos()
+ cases := r.exprList()
+
+ clause := ir.NewCaseStmt(pos, cases, nil)
+ if tswitch != nil {
+ pos := r.pos()
+ typ := r.typ()
+
+ name := ir.NewNameAt(pos, tswitch.Tag.Sym())
+ r.setType(name, typ)
+ r.addLocal(name, ir.PAUTO)
+ clause.Var = name
+ name.Defn = tswitch
+ }
+
+ clause.Body = r.stmts()
+ clauses[i] = clause
+ }
+ if len(clauses) > 0 {
+ r.closeScope()
+ }
+ r.closeScope()
+
+ n := ir.NewSwitchStmt(pos, tag, clauses)
+ n.Label = label
+ if init != nil {
+ n.SetInit([]ir.Node{init})
+ }
+ return n
+}
+
+func (r *reader) label() *types.Sym {
+ r.sync(syncLabel)
+ name := r.string()
+ if r.inlCall != nil {
+ name = fmt.Sprintf("~%s·%d", name, inlgen)
+ }
+ return typecheck.Lookup(name)
+}
+
+func (r *reader) optLabel() *types.Sym {
+ r.sync(syncOptLabel)
+ if r.bool() {
+ return r.label()
+ }
+ return nil
+}
+
+// initDefn marks the given names as declared by defn and populates
+// its Init field with ODCL nodes. It then reports whether any names
+// were so declared, which can be used to initialize defn.Def.
+func (r *reader) initDefn(defn ir.InitNode, names []*ir.Name) bool {
+ if len(names) == 0 {
+ return false
+ }
+
+ init := make([]ir.Node, len(names))
+ for i, name := range names {
+ name.Defn = defn
+ init[i] = ir.NewDecl(name.Pos(), ir.ODCL, name)
+ }
+ defn.SetInit(init)
+ return true
+}
+
+// @@@ Expressions
+
+// expr reads and returns a typechecked expression.
+func (r *reader) expr() (res ir.Node) {
+ defer func() {
+ if res != nil && res.Typecheck() == 0 {
+ base.FatalfAt(res.Pos(), "%v missed typecheck", res)
+ }
+ }()
+
+ switch tag := codeExpr(r.code(syncExpr)); tag {
+ default:
+ panic("unhandled expression")
+
+ case exprNone:
+ return nil
+
+ case exprBlank:
+ // Blank is only allowed on the LHS of assignments.
+ // TODO(mdempsky): Handle directly in assignList instead?
+ return typecheck.AssignExpr(ir.BlankNode)
+
+ case exprLocal:
+ return typecheck.Expr(r.useLocal())
+
+ case exprName:
+ // Callee instead of Expr allows builtins
+ // TODO(mdempsky): Handle builtins directly in exprCall, like method calls?
+ return typecheck.Callee(r.obj())
+
+ case exprType:
+ // TODO(mdempsky): ir.TypeNode should probably return a typecheck'd node.
+ n := ir.TypeNode(r.typ())
+ n.SetTypecheck(1)
+ return n
+
+ case exprConst:
+ pos := r.pos()
+ typ, val := r.value()
+ op := r.op()
+ orig := r.string()
+ return typecheck.Expr(OrigConst(pos, typ, val, op, orig))
+
+ case exprCompLit:
+ return r.compLit()
+
+ case exprFuncLit:
+ return r.funcLit()
+
+ case exprSelector:
+ x := r.expr()
+ pos := r.pos()
+ _, sym := r.selector()
+ return typecheck.Expr(ir.NewSelectorExpr(pos, ir.OXDOT, x, sym))
+
+ case exprIndex:
+ x := r.expr()
+ pos := r.pos()
+ index := r.expr()
+ return typecheck.Expr(ir.NewIndexExpr(pos, x, index))
+
+ case exprSlice:
+ x := r.expr()
+ pos := r.pos()
+ var index [3]ir.Node
+ for i := range index {
+ index[i] = r.expr()
+ }
+ op := ir.OSLICE
+ if index[2] != nil {
+ op = ir.OSLICE3
+ }
+ return typecheck.Expr(ir.NewSliceExpr(pos, op, x, index[0], index[1], index[2]))
+
+ case exprAssert:
+ x := r.expr()
+ pos := r.pos()
+ typ := r.expr().(ir.Ntype)
+ return typecheck.Expr(ir.NewTypeAssertExpr(pos, x, typ))
+
+ case exprUnaryOp:
+ op := r.op()
+ pos := r.pos()
+ x := r.expr()
+
+ switch op {
+ case ir.OADDR:
+ return typecheck.Expr(typecheck.NodAddrAt(pos, x))
+ case ir.ODEREF:
+ return typecheck.Expr(ir.NewStarExpr(pos, x))
+ }
+ return typecheck.Expr(ir.NewUnaryExpr(pos, op, x))
+
+ case exprBinaryOp:
+ op := r.op()
+ x := r.expr()
+ pos := r.pos()
+ y := r.expr()
+
+ switch op {
+ case ir.OANDAND, ir.OOROR:
+ return typecheck.Expr(ir.NewLogicalExpr(pos, op, x, y))
+ }
+ return typecheck.Expr(ir.NewBinaryExpr(pos, op, x, y))
+
+ case exprCall:
+ fun := r.expr()
+ if r.bool() { // method call
+ pos := r.pos()
+ _, sym := r.selector()
+ fun = typecheck.Callee(ir.NewSelectorExpr(pos, ir.OXDOT, fun, sym))
+ }
+ pos := r.pos()
+ args := r.exprs()
+ dots := r.bool()
+ return typecheck.Call(pos, fun, args, dots)
+
+ case exprConvert:
+ typ := r.typ()
+ pos := r.pos()
+ x := r.expr()
+ return typecheck.Expr(ir.NewConvExpr(pos, ir.OCONV, typ, x))
+ }
+}
+
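+// compLit reads a composite literal. Struct literals record the
+// field index of each element, while other literals optionally
+// record a key; for a pointer-to-composite literal, the address of
+// the underlying literal is taken afterwards.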
+func (r *reader) compLit() ir.Node {
+ r.sync(syncCompLit)
+ pos := r.pos()
+ typ0 := r.typ()
+
+ typ := typ0
+ if typ.IsPtr() {
+ typ = typ.Elem()
+ }
+ if typ.Kind() == types.TFORW {
+ base.FatalfAt(pos, "unresolved composite literal type: %v", typ)
+ }
+ isStruct := typ.Kind() == types.TSTRUCT
+
+ elems := make([]ir.Node, r.len())
+ for i := range elems {
+ elemp := &elems[i]
+
+ if isStruct {
+ sk := ir.NewStructKeyExpr(r.pos(), typ.Field(r.len()), nil)
+ *elemp, elemp = sk, &sk.Value
+ } else if r.bool() {
+ kv := ir.NewKeyExpr(r.pos(), r.expr(), nil)
+ *elemp, elemp = kv, &kv.Value
+ }
+
+ *elemp = wrapName(r.pos(), r.expr())
+ }
+
+ lit := typecheck.Expr(ir.NewCompLitExpr(pos, ir.OCOMPLIT, ir.TypeNode(typ), elems))
+ if typ0.IsPtr() {
+ lit = typecheck.Expr(typecheck.NodAddrAt(pos, lit))
+ lit.SetType(typ0)
+ }
+ return lit
+}
+
+func wrapName(pos src.XPos, x ir.Node) ir.Node {
+ // These nodes do not carry line numbers.
+ // Introduce a wrapper node to give them the correct line.
+ switch ir.Orig(x).Op() {
+ case ir.OTYPE, ir.OLITERAL:
+ if x.Sym() == nil {
+ break
+ }
+ fallthrough
+ case ir.ONAME, ir.ONONAME, ir.OPACK, ir.ONIL:
+ p := ir.NewParenExpr(pos, x)
+ p.SetImplicit(true)
+ return p
+ }
+ return x
+}
+
+func (r *reader) funcLit() ir.Node {
+ r.sync(syncFuncLit)
+
+ pos := r.pos()
+ typPos := r.pos()
+ xtype2 := r.signature(types.LocalPkg, nil)
+
+ opos := pos
+ if quirksMode() {
+ opos = r.origPos(pos)
+ }
+
+ fn := ir.NewClosureFunc(opos, r.curfn != nil)
+ clo := fn.OClosure
+ ir.NameClosure(clo, r.curfn)
+
+ r.setType(fn.Nname, xtype2)
+ if quirksMode() {
+ fn.Nname.Ntype = ir.TypeNodeAt(typPos, xtype2)
+ }
+ typecheck.Func(fn)
+ r.setType(clo, fn.Type())
+
+ fn.ClosureVars = make([]*ir.Name, 0, r.len())
+ for len(fn.ClosureVars) < cap(fn.ClosureVars) {
+ ir.NewClosureVar(r.pos(), fn, r.useLocal())
+ }
+
+ r.addBody(fn)
+
+ // TODO(mdempsky): Remove hard-coding of typecheck.Target.
+ return ir.UseClosure(clo, typecheck.Target)
+}
+
+func (r *reader) exprList() []ir.Node {
+ r.sync(syncExprList)
+ return r.exprs()
+}
+
+func (r *reader) exprs() []ir.Node {
+ r.sync(syncExprs)
+ nodes := make([]ir.Node, r.len())
+ if len(nodes) == 0 {
+ return nil // TODO(mdempsky): Unclear if this matters.
+ }
+ for i := range nodes {
+ nodes[i] = r.expr()
+ }
+ return nodes
+}
+
+func (r *reader) op() ir.Op {
+ r.sync(syncOp)
+ return ir.Op(r.len())
+}
+
+// @@@ Package initialization
+
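+// pkgInit reads the package-level data: in quirks mode it first
+// registers position bases and imported objects so their indices
+// stay stable, then it reads the cgo pragmas and the package's
+// top-level declarations.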
+func (r *reader) pkgInit(self *types.Pkg, target *ir.Package) {
+ if quirksMode() {
+ for i, n := 0, r.len(); i < n; i++ {
+ // Eagerly register position bases, so their filenames are
+ // assigned stable indices.
+ posBase := r.posBase()
+ _ = base.Ctxt.PosTable.XPos(src.MakePos(posBase, 0, 0))
+ }
+
+ for i, n := 0, r.len(); i < n; i++ {
+ // Eagerly resolve imported objects, so any filenames registered
+ // in the process are assigned stable indices too.
+ _, sym := r.qualifiedIdent()
+ typecheck.Resolve(ir.NewIdent(src.NoXPos, sym))
+ assert(sym.Def != nil)
+ }
+ }
+
+ cgoPragmas := make([][]string, r.len())
+ for i := range cgoPragmas {
+ cgoPragmas[i] = r.strings()
+ }
+ target.CgoPragmas = cgoPragmas
+
+ r.pkgDecls(target)
+
+ r.sync(syncEOF)
+}
+
+func (r *reader) pkgDecls(target *ir.Package) {
+ r.sync(syncDecls)
+ for {
+ switch code := codeDecl(r.code(syncDecl)); code {
+ default:
+ panic(fmt.Sprintf("unhandled decl: %v", code))
+
+ case declEnd:
+ return
+
+ case declFunc:
+ names := r.pkgObjs(target)
+ assert(len(names) == 1)
+ target.Decls = append(target.Decls, names[0].Func)
+
+ case declMethod:
+ typ := r.typ()
+ _, sym := r.selector()
+
+ method := typecheck.Lookdot1(nil, sym, typ, typ.Methods(), 0)
+ target.Decls = append(target.Decls, method.Nname.(*ir.Name).Func)
+
+ case declVar:
+ pos := r.pos()
+ names := r.pkgObjs(target)
+ values := r.exprList()
+
+ if len(names) > 1 && len(values) == 1 {
+ as := ir.NewAssignListStmt(pos, ir.OAS2, nil, values)
+ for _, name := range names {
+ as.Lhs.Append(name)
+ name.Defn = as
+ }
+ target.Decls = append(target.Decls, as)
+ } else {
+ for i, name := range names {
+ as := ir.NewAssignStmt(pos, name, nil)
+ if i < len(values) {
+ as.Y = values[i]
+ }
+ name.Defn = as
+ target.Decls = append(target.Decls, as)
+ }
+ }
+
+ if n := r.len(); n > 0 {
+ assert(len(names) == 1)
+ embeds := make([]ir.Embed, n)
+ for i := range embeds {
+ embeds[i] = ir.Embed{Pos: r.pos(), Patterns: r.strings()}
+ }
+ names[0].Embed = &embeds
+ target.Embeds = append(target.Embeds, names[0])
+ }
+
+ case declOther:
+ r.pkgObjs(target)
+ }
+ }
+}
+
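+// pkgObjs reads the names introduced by a package-level declaration
+// and registers each non-blank name with the appropriate lists on
+// target (Externs, Inits, Exports, Asms).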
+func (r *reader) pkgObjs(target *ir.Package) []*ir.Name {
+ r.sync(syncDeclNames)
+ nodes := make([]*ir.Name, r.len())
+ for i := range nodes {
+ r.sync(syncDeclName)
+
+ name := r.obj().(*ir.Name)
+ nodes[i] = name
+
+ sym := name.Sym()
+ if sym.IsBlank() {
+ continue
+ }
+
+ switch name.Class {
+ default:
+ base.FatalfAt(name.Pos(), "unexpected class: %v", name.Class)
+
+ case ir.PEXTERN:
+ target.Externs = append(target.Externs, name)
+
+ case ir.PFUNC:
+ assert(name.Type().Recv() == nil)
+
+ // TODO(mdempsky): Cleaner way to recognize init?
+ if strings.HasPrefix(sym.Name, "init.") {
+ target.Inits = append(target.Inits, name.Func)
+ }
+ }
+
+ if types.IsExported(sym.Name) {
+ assert(!sym.OnExportList())
+ target.Exports = append(target.Exports, name)
+ sym.SetOnExportList(true)
+ }
+
+ if base.Flag.AsmHdr != "" {
+ assert(!sym.Asm())
+ target.Asms = append(target.Asms, name)
+ sym.SetAsm(true)
+ }
+ }
+
+ return nodes
+}
+
+// @@@ Inlining
+
+var inlgen = 0
+
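+// InlineCall expands the call to fn inline: it re-reads fn's body
+// into a temporary function, declares and initializes the inlined
+// parameter and result variables, rewrites return statements into
+// gotos to a shared label, and returns the resulting
+// InlinedCallExpr. It returns nil if fn's body is unavailable in
+// quirks mode.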
+func InlineCall(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExpr {
+ // TODO(mdempsky): Turn callerfn into an explicit parameter.
+ callerfn := ir.CurFunc
+
+ pri, ok := bodyReader[fn]
+ if !ok {
+ // Assume it's an imported function or something that we don't
+ // have access to in quirks mode.
+ if haveLegacyImports {
+ return nil
+ }
+
+ base.FatalfAt(call.Pos(), "missing function body for call to %v", fn)
+ }
+
+ if fn.Inl.Body == nil {
+ expandInline(fn, pri)
+ }
+
+ r := pri.asReader(relocBody, syncFuncBody)
+
+ // TODO(mdempsky): This still feels clumsy. Can we do better?
+ tmpfn := ir.NewFunc(fn.Pos())
+ tmpfn.Nname = ir.NewNameAt(fn.Nname.Pos(), callerfn.Sym())
+ tmpfn.Closgen = callerfn.Closgen
+ defer func() { callerfn.Closgen = tmpfn.Closgen }()
+
+ r.setType(tmpfn.Nname, fn.Type())
+ r.curfn = tmpfn
+
+ r.inlCaller = callerfn
+ r.inlCall = call
+ r.inlFunc = fn
+ r.inlTreeIndex = inlIndex
+ r.inlPosBases = make(map[*src.PosBase]*src.PosBase)
+
+ r.closureVars = make([]*ir.Name, len(r.inlFunc.ClosureVars))
+ for i, cv := range r.inlFunc.ClosureVars {
+ r.closureVars[i] = cv.Outer
+ }
+
+ r.funcargs(fn)
+
+ assert(r.bool()) // have body
+ r.delayResults = fn.Inl.CanDelayResults
+
+ r.retlabel = typecheck.AutoLabel(".i")
+ inlgen++
+
+ init := ir.TakeInit(call)
+
+ // For normal function calls, the function callee expression
+ // may contain side effects. Make sure to preserve these,
+ // if necessary (#42703).
+ if call.Op() == ir.OCALLFUNC {
+ inline.CalleeEffects(&init, call.X)
+ }
+
+ var args ir.Nodes
+ if call.Op() == ir.OCALLMETH {
+ base.FatalfAt(call.Pos(), "OCALLMETH missed by typecheck")
+ }
+ args.Append(call.Args...)
+
+ // Create assignment to declare and initialize inlvars.
+ as2 := ir.NewAssignListStmt(call.Pos(), ir.OAS2, r.inlvars, args)
+ as2.Def = true
+ var as2init ir.Nodes
+ for _, name := range r.inlvars {
+ if ir.IsBlank(name) {
+ continue
+ }
+ // TODO(mdempsky): Use inlined position of name.Pos() instead?
+ name := name.(*ir.Name)
+ as2init.Append(ir.NewDecl(call.Pos(), ir.ODCL, name))
+ name.Defn = as2
+ }
+ as2.SetInit(as2init)
+ init.Append(typecheck.Stmt(as2))
+
+ if !r.delayResults {
+ // If not delaying retvars, declare and zero initialize the
+ // result variables now.
+ for _, name := range r.retvars {
+ // TODO(mdempsky): Use inlined position of name.Pos() instead?
+ name := name.(*ir.Name)
+ init.Append(ir.NewDecl(call.Pos(), ir.ODCL, name))
+ ras := ir.NewAssignStmt(call.Pos(), name, nil)
+ init.Append(typecheck.Stmt(ras))
+ }
+ }
+
+ // Add an inline mark just before the inlined body.
+ // This mark is inline in the code so that it's a reasonable spot
+ // to put a breakpoint. Not sure if that's really necessary or not
+ // (in which case it could go at the end of the function instead).
+ // Note issue 28603.
+ init.Append(ir.NewInlineMarkStmt(call.Pos().WithIsStmt(), int64(r.inlTreeIndex)))
+
+ nparams := len(r.curfn.Dcl)
+
+ ir.WithFunc(r.curfn, func() {
+ r.curfn.Body = r.stmts()
+ r.curfn.Endlineno = r.pos()
+
+ deadcode.Func(r.curfn)
+
+ // Replace any "return" statements within the function body.
+ var edit func(ir.Node) ir.Node
+ edit = func(n ir.Node) ir.Node {
+ if ret, ok := n.(*ir.ReturnStmt); ok {
+ n = typecheck.Stmt(r.inlReturn(ret))
+ }
+ ir.EditChildren(n, edit)
+ return n
+ }
+ edit(r.curfn)
+ })
+
+ body := ir.Nodes(r.curfn.Body)
+
+ // Quirk: If deadcode elimination turned a non-empty function into
+ // an empty one, we need to set the position for the empty block
+ // left behind to the inlined position for src.NoXPos, so that
+ // an empty string gets added into the DWARF file name listing at
+ // the appropriate index.
+ if quirksMode() && len(body) == 1 {
+ if block, ok := body[0].(*ir.BlockStmt); ok && len(block.List) == 0 {
+ block.SetPos(r.updatePos(src.NoXPos))
+ }
+ }
+
+ // Quirkish: We need to eagerly prune variables added during
+ // inlining, but removed by deadcode.Func above. Unused
+ // variables will get removed during stack frame layout anyway, but
+ // len(fn.Dcl) ends up influencing things like autotmp naming.
+
+ used := usedLocals(body)
+
+ for i, name := range r.curfn.Dcl {
+ if i < nparams || used.Has(name) {
+ name.Curfn = callerfn
+ callerfn.Dcl = append(callerfn.Dcl, name)
+
+ // Quirkish. TODO(mdempsky): Document why.
+ if name.AutoTemp() {
+ name.SetEsc(ir.EscUnknown)
+
+ if base.Flag.GenDwarfInl != 0 {
+ name.SetInlLocal(true)
+ } else {
+ name.SetPos(r.inlCall.Pos())
+ }
+ }
+ }
+ }
+
+ body.Append(ir.NewLabelStmt(call.Pos(), r.retlabel))
+
+ res := ir.NewInlinedCallExpr(call.Pos(), body, append([]ir.Node(nil), r.retvars...))
+ res.SetInit(init)
+ res.SetType(call.Type())
+ res.SetTypecheck(1)
+
+ // Inlining shouldn't add any functions to todoBodies.
+ assert(len(todoBodies) == 0)
+
+ return res
+}
+
+// inlReturn returns a statement that can substitute for the given
+// return statement when inlining.
+func (r *reader) inlReturn(ret *ir.ReturnStmt) *ir.BlockStmt {
+ pos := r.inlCall.Pos()
+
+ block := ir.TakeInit(ret)
+
+ if results := ret.Results; len(results) != 0 {
+ assert(len(r.retvars) == len(results))
+
+ as2 := ir.NewAssignListStmt(pos, ir.OAS2, append([]ir.Node(nil), r.retvars...), ret.Results)
+
+ if r.delayResults {
+ for _, name := range r.retvars {
+ // TODO(mdempsky): Use inlined position of name.Pos() instead?
+ name := name.(*ir.Name)
+ block.Append(ir.NewDecl(pos, ir.ODCL, name))
+ name.Defn = as2
+ }
+ }
+
+ block.Append(as2)
+ }
+
+ block.Append(ir.NewBranchStmt(pos, ir.OGOTO, r.retlabel))
+ return ir.NewBlockStmt(pos, block)
+}
+
+// expandInline reads in an extra copy of IR to populate
+// fn.Inl.{Dcl,Body}.
+func expandInline(fn *ir.Func, pri pkgReaderIndex) {
+ // TODO(mdempsky): Remove this function. It's currently needed by
+ // dwarfgen/dwarf.go:preInliningDcls, which requires fn.Inl.Dcl to
+ // create abstract function DIEs. But we should be able to provide it
+ // with the same information some other way.
+
+ fndcls := len(fn.Dcl)
+ topdcls := len(typecheck.Target.Decls)
+
+ tmpfn := ir.NewFunc(fn.Pos())
+ tmpfn.Nname = ir.NewNameAt(fn.Nname.Pos(), fn.Sym())
+ tmpfn.ClosureVars = fn.ClosureVars
+
+ {
+ r := pri.asReader(relocBody, syncFuncBody)
+ r.setType(tmpfn.Nname, fn.Type())
+
+ // Don't change parameter's Sym/Nname fields.
+ r.funarghack = true
+
+ r.funcBody(tmpfn)
+
+ ir.WithFunc(tmpfn, func() {
+ deadcode.Func(tmpfn)
+ })
+ }
+
+ used := usedLocals(tmpfn.Body)
+
+ for _, name := range tmpfn.Dcl {
+ if name.Class != ir.PAUTO || used.Has(name) {
+ name.Curfn = fn
+ fn.Inl.Dcl = append(fn.Inl.Dcl, name)
+ }
+ }
+ fn.Inl.Body = tmpfn.Body
+
+ // Double check that we didn't change fn.Dcl by accident.
+ assert(fndcls == len(fn.Dcl))
+
+ // typecheck.Stmts may have added function literals to
+ // typecheck.Target.Decls. Remove them again so we don't risk trying
+ // to compile them multiple times.
+ typecheck.Target.Decls = typecheck.Target.Decls[:topdcls]
+}
+
+// usedLocals returns a set of local variables that are used within body.
+func usedLocals(body []ir.Node) ir.NameSet {
+ var used ir.NameSet
+ ir.VisitList(body, func(n ir.Node) {
+ if n, ok := n.(*ir.Name); ok && n.Op() == ir.ONAME && n.Class == ir.PAUTO {
+ used.Add(n)
+ }
+ })
+ return used
+}
+
+// @@@ Method wrappers
+
+// needWrapperTypes lists types for which we may need to generate
+// method wrappers.
+var needWrapperTypes []*types.Type
+
+// haveWrapperTypes lists types for which we know we already have
+// method wrappers, because we found the type in an imported package.
+var haveWrapperTypes []*types.Type
+
+func (r *reader) needWrapper(typ *types.Type) *types.Type {
+ if typ.IsPtr() {
+ base.Fatalf("bad pointer type: %v", typ)
+ }
+
+ // If a type was found in an imported package, then we can assume
+ // that package (or one of its transitive dependencies) already
+ // generated method wrappers for it.
+ //
+ // Exception: If we're instantiating an imported generic type or
+ // function, we might be instantiating it with type arguments not
+ // previously seen before.
+ //
+ // TODO(mdempsky): Distinguish when a generic function or type was
+ // instantiated in an imported package so that we can add types to
+ // haveWrapperTypes instead.
+ if r.p != localPkgReader && !r.hasTypeParams() {
+ haveWrapperTypes = append(haveWrapperTypes, typ)
+ } else {
+ needWrapperTypes = append(needWrapperTypes, typ)
+ }
+
+ return typ
+}
+
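+// wrapTypes generates method wrappers for the types recorded in
+// needWrapperTypes. Unnamed types are deduplicated by their link
+// strings, including against the types already listed in
+// haveWrapperTypes.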
+func (r *reader) wrapTypes(target *ir.Package) {
+ // always generate a wrapper for error.Error (#29304)
+ r.needWrapper(types.ErrorType)
+
+ seen := make(map[string]*types.Type)
+ addType := func(typ *types.Type) bool {
+ if typ.Sym() != nil {
+ return true
+ }
+
+ key := typ.LinkString()
+ if prev := seen[key]; prev != nil {
+ if !types.Identical(typ, prev) {
+ base.Fatalf("collision: types %v and %v have short string %q", typ, prev, key)
+ }
+ return false
+ }
+
+ seen[key] = typ
+ return true
+ }
+
+ for _, typ := range haveWrapperTypes {
+ addType(typ)
+ }
+ haveWrapperTypes = nil
+
+ for _, typ := range needWrapperTypes {
+ if addType(typ) {
+ r.wrapType(typ, target)
+ }
+ }
+ needWrapperTypes = nil
+}
+
+func (r *reader) wrapType(typ *types.Type, target *ir.Package) {
+ if !typ.IsInterface() {
+ typecheck.CalcMethods(typ)
+ }
+ for _, meth := range typ.AllMethods().Slice() {
+ if meth.Sym.IsBlank() || !meth.IsMethod() {
+ base.FatalfAt(meth.Pos, "invalid method: %v", meth)
+ }
+
+ r.methodValueWrapper(typ, meth, target)
+
+ r.methodWrapper(0, typ, meth, target)
+
+ // For non-interface types, we also want *T wrappers.
+ if !typ.IsInterface() {
+ r.methodWrapper(1, typ, meth, target)
+
+ // For not-in-heap types, *T is a scalar, not pointer shaped,
+ // so the interface wrappers use **T.
+ if typ.NotInHeap() {
+ r.methodWrapper(2, typ, meth, target)
+ }
+ }
+ }
+}
+
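+// methodWrapper generates a wrapper for method whose receiver type
+// is tbase wrapped in derefs levels of pointer, unless the wrapper
+// would be identical to the method itself, the method is not
+// applicable to that receiver, or tbase does not need wrappers
+// emitted in this package.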
+func (r *reader) methodWrapper(derefs int, tbase *types.Type, method *types.Field, target *ir.Package) {
+ wrapper := tbase
+ for i := 0; i < derefs; i++ {
+ wrapper = types.NewPtr(wrapper)
+ }
+
+ sym := ir.MethodSym(wrapper, method.Sym)
+ assert(!sym.Siggen())
+ sym.SetSiggen(true)
+
+ wrappee := method.Type.Recv().Type
+ if types.Identical(wrapper, wrappee) ||
+ !types.IsMethodApplicable(wrapper, method) ||
+ !reflectdata.NeedEmit(tbase) {
+ return
+ }
+
+ // TODO(mdempsky): Use method.Pos instead?
+ pos := base.AutogeneratedPos
+
+ fn := r.newWrapperFunc(pos, sym, wrapper, method)
+
+ var recv ir.Node = fn.Nname.Type().Recv().Nname.(*ir.Name)
+
+ // For simple *T wrappers around T methods, panicwrap produces a
+ // nicer panic message.
+ if wrapper.IsPtr() && types.Identical(wrapper.Elem(), wrappee) {
+ cond := ir.NewBinaryExpr(pos, ir.OEQ, recv, types.BuiltinPkg.Lookup("nil").Def.(ir.Node))
+ then := []ir.Node{ir.NewCallExpr(pos, ir.OCALL, typecheck.LookupRuntime("panicwrap"), nil)}
+ fn.Body.Append(ir.NewIfStmt(pos, cond, then, nil))
+ }
+
+ // typecheck will add one implicit deref, if necessary,
+ // but not-in-heap types require more for their **T wrappers.
+ for i := 1; i < derefs; i++ {
+ recv = Implicit(ir.NewStarExpr(pos, recv))
+ }
+
+ addTailCall(pos, fn, recv, method)
+
+ r.finishWrapperFunc(fn, target)
+}
+
+func (r *reader) methodValueWrapper(tbase *types.Type, method *types.Field, target *ir.Package) {
+ recvType := tbase
+ if !tbase.IsInterface() {
+ recvType = method.Type.Recv().Type
+ if !types.Identical(tbase, types.ReceiverBaseType(recvType)) {
+ return
+ }
+ }
+
+ sym := ir.MethodSymSuffix(recvType, method.Sym, "-fm")
+ assert(!sym.Uniq())
+ sym.SetUniq(true)
+
+ // TODO(mdempsky): Use method.Pos instead?
+ pos := base.AutogeneratedPos
+
+ fn := r.newWrapperFunc(pos, sym, nil, method)
+ sym.Def = fn.Nname
+
+ // Declare and initialize variable holding receiver.
+ recv := ir.NewHiddenParam(pos, fn, typecheck.Lookup(".this"), recvType)
+
+ if !reflectdata.NeedEmit(tbase) {
+ typecheck.Func(fn)
+ return
+ }
+
+ addTailCall(pos, fn, recv, method)
+
+ r.finishWrapperFunc(fn, target)
+}
+
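+// newWrapperFunc constructs an IR function named sym whose signature
+// is method's signature with the receiver replaced by wrapper (or
+// dropped when wrapper is nil), and declares names for all of its
+// parameters and results.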
+func (r *reader) newWrapperFunc(pos src.XPos, sym *types.Sym, wrapper *types.Type, method *types.Field) *ir.Func {
+ fn := ir.NewFunc(pos)
+ fn.SetDupok(true) // TODO(mdempsky): Leave unset for local, non-generic wrappers?
+
+ name := ir.NewNameAt(pos, sym)
+ ir.MarkFunc(name)
+ name.Func = fn
+ name.Defn = fn
+ fn.Nname = name
+
+ sig := newWrapperType(wrapper, method)
+ r.setType(name, sig)
+
+ // TODO(mdempsky): De-duplicate with similar logic in funcargs.
+ defParams := func(class ir.Class, params *types.Type) {
+ for _, param := range params.FieldSlice() {
+ name := ir.NewNameAt(param.Pos, param.Sym)
+ name.Class = class
+ r.setType(name, param.Type)
+
+ name.Curfn = fn
+ fn.Dcl = append(fn.Dcl, name)
+
+ param.Nname = name
+ }
+ }
+
+ defParams(ir.PPARAM, sig.Recvs())
+ defParams(ir.PPARAM, sig.Params())
+ defParams(ir.PPARAMOUT, sig.Results())
+
+ return fn
+}
+
+func (r *reader) finishWrapperFunc(fn *ir.Func, target *ir.Package) {
+ typecheck.Func(fn)
+
+ ir.WithFunc(fn, func() {
+ typecheck.Stmts(fn.Body)
+ })
+
+ target.Decls = append(target.Decls, fn)
+}
+
+// newWrapperType returns a copy of the given signature type, but with
+// the receiver parameter type substituted with recvType.
+// If recvType is nil, newWrapperType returns a signature
+// without a receiver parameter.
+func newWrapperType(recvType *types.Type, method *types.Field) *types.Type {
+ clone := func(params []*types.Field) []*types.Field {
+ res := make([]*types.Field, len(params))
+ for i, param := range params {
+ sym := param.Sym
+ if sym == nil || sym.Name == "_" {
+ sym = typecheck.LookupNum(".anon", i)
+ }
+ res[i] = types.NewField(param.Pos, sym, param.Type)
+ res[i].SetIsDDD(param.IsDDD())
+ }
+ return res
+ }
+
+ sig := method.Type
+
+ var recv *types.Field
+ if recvType != nil {
+ recv = types.NewField(sig.Recv().Pos, typecheck.Lookup(".this"), recvType)
+ }
+ params := clone(sig.Params().FieldSlice())
+ results := clone(sig.Results().FieldSlice())
+
+ return types.NewSignature(types.NoPkg, recv, nil, params, results)
+}
+
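+// addTailCall fills in fn's body with a call that forwards fn's
+// parameters to method on recv, returning the results if the method
+// has any. Roughly, the generated body has the form
+//
+//	return recv.M(p0, p1, ...)
+//
+// Despite the name, an ordinary call is generated rather than an
+// OTAILCALL; see the TODO below.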
+func addTailCall(pos src.XPos, fn *ir.Func, recv ir.Node, method *types.Field) {
+ sig := fn.Nname.Type()
+ args := make([]ir.Node, sig.NumParams())
+ for i, param := range sig.Params().FieldSlice() {
+ args[i] = param.Nname.(*ir.Name)
+ }
+
+ // TODO(mdempsky): Support creating OTAILCALL, when possible. See reflectdata.methodWrapper.
+ // Not urgent though, because tail calls are currently incompatible with regabi anyway.
+
+ fn.SetWrapper(true) // TODO(mdempsky): Leave unset for tail calls?
+
+ dot := ir.NewSelectorExpr(pos, ir.OXDOT, recv, method.Sym)
+ call := typecheck.Call(pos, dot, args, method.Type.IsVariadic()).(*ir.CallExpr)
+
+ if method.Type.NumResults() == 0 {
+ fn.Body.Append(call)
+ return
+ }
+
+ ret := ir.NewReturnStmt(pos, nil)
+ ret.Results = []ir.Node{call}
+ fn.Body.Append(ret)
+}
diff --git a/src/cmd/compile/internal/noder/reader2.go b/src/cmd/compile/internal/noder/reader2.go
new file mode 100644
index 0000000000..8f3f0a50e9
--- /dev/null
+++ b/src/cmd/compile/internal/noder/reader2.go
@@ -0,0 +1,509 @@
+// UNREVIEWED
+
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+import (
+ "go/constant"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/syntax"
+ "cmd/compile/internal/types2"
+ "cmd/internal/src"
+)
+
+type pkgReader2 struct {
+ pkgDecoder
+
+ check *types2.Checker
+ imports map[string]*types2.Package
+
+ posBases []*syntax.PosBase
+ pkgs []*types2.Package
+ typs []types2.Type
+}
+
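+// readPackage2 reads a unified IR export for types2: it decodes the
+// package, reads every exported object into the package's scope, and
+// marks the package complete.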
+func readPackage2(check *types2.Checker, imports map[string]*types2.Package, input pkgDecoder) *types2.Package {
+ pr := pkgReader2{
+ pkgDecoder: input,
+
+ check: check,
+ imports: imports,
+
+ posBases: make([]*syntax.PosBase, input.numElems(relocPosBase)),
+ pkgs: make([]*types2.Package, input.numElems(relocPkg)),
+ typs: make([]types2.Type, input.numElems(relocType)),
+ }
+
+ r := pr.newReader(relocMeta, publicRootIdx, syncPublic)
+ pkg := r.pkg()
+ r.bool() // has init
+
+ for i, n := 0, r.len(); i < n; i++ {
+ r.obj()
+ }
+
+ r.sync(syncEOF)
+
+ pkg.MarkComplete()
+ return pkg
+}
+
+type reader2 struct {
+ decoder
+
+ p *pkgReader2
+
+ dict *reader2Dict
+}
+
+type reader2Dict struct {
+ bounds []typeInfo
+
+ tparams []*types2.TypeParam
+
+ derived []derivedInfo
+ derivedTypes []types2.Type
+}
+
+type reader2TypeBound struct {
+ derived bool
+ boundIdx int
+}
+
+func (pr *pkgReader2) newReader(k reloc, idx int, marker syncMarker) *reader2 {
+ return &reader2{
+ decoder: pr.newDecoder(k, idx, marker),
+ p: pr,
+ }
+}
+
+// @@@ Positions
+
+func (r *reader2) pos() syntax.Pos {
+ r.sync(syncPos)
+ if !r.bool() {
+ return syntax.Pos{}
+ }
+
+ // TODO(mdempsky): Delta encoding.
+ posBase := r.posBase()
+ line := r.uint()
+ col := r.uint()
+ return syntax.MakePos(posBase, line, col)
+}
+
+func (r *reader2) posBase() *syntax.PosBase {
+ return r.p.posBaseIdx(r.reloc(relocPosBase))
+}
+
+func (pr *pkgReader2) posBaseIdx(idx int) *syntax.PosBase {
+ if b := pr.posBases[idx]; b != nil {
+ return b
+ }
+
+ r := pr.newReader(relocPosBase, idx, syncPosBase)
+ var b *syntax.PosBase
+
+ filename := r.string()
+
+ if r.bool() {
+ b = syntax.NewTrimmedFileBase(filename, true)
+ } else {
+ pos := r.pos()
+ line := r.uint()
+ col := r.uint()
+ b = syntax.NewLineBase(pos, filename, true, line, col)
+ }
+
+ pr.posBases[idx] = b
+ return b
+}
+
+// @@@ Packages
+
+func (r *reader2) pkg() *types2.Package {
+ r.sync(syncPkg)
+ return r.p.pkgIdx(r.reloc(relocPkg))
+}
+
+func (pr *pkgReader2) pkgIdx(idx int) *types2.Package {
+ // TODO(mdempsky): Consider using some non-nil pointer to indicate
+ // the universe scope, so we don't need to keep re-reading it.
+ if pkg := pr.pkgs[idx]; pkg != nil {
+ return pkg
+ }
+
+ pkg := pr.newReader(relocPkg, idx, syncPkgDef).doPkg()
+ pr.pkgs[idx] = pkg
+ return pkg
+}
+
+func (r *reader2) doPkg() *types2.Package {
+ path := r.string()
+ if path == "builtin" {
+ return nil // universe
+ }
+ if path == "" {
+ path = r.p.pkgPath
+ }
+
+ if pkg := r.p.imports[path]; pkg != nil {
+ return pkg
+ }
+
+ name := r.string()
+ height := r.len()
+
+ pkg := types2.NewPackageHeight(path, name, height)
+ r.p.imports[path] = pkg
+
+ // TODO(mdempsky): The list of imported packages is important for
+ // go/types, but we could probably skip populating it for types2.
+ imports := make([]*types2.Package, r.len())
+ for i := range imports {
+ imports[i] = r.pkg()
+ }
+ pkg.SetImports(imports)
+
+ return pkg
+}
+
+// @@@ Types
+
+func (r *reader2) typ() types2.Type {
+ return r.p.typIdx(r.typInfo(), r.dict)
+}
+
+func (r *reader2) typInfo() typeInfo {
+ r.sync(syncType)
+ if r.bool() {
+ return typeInfo{idx: r.len(), derived: true}
+ }
+ return typeInfo{idx: r.reloc(relocType), derived: false}
+}
+
+func (pr *pkgReader2) typIdx(info typeInfo, dict *reader2Dict) types2.Type {
+ idx := info.idx
+ var where *types2.Type
+ if info.derived {
+ where = &dict.derivedTypes[idx]
+ idx = dict.derived[idx].idx
+ } else {
+ where = &pr.typs[idx]
+ }
+
+ if typ := *where; typ != nil {
+ return typ
+ }
+
+ r := pr.newReader(relocType, idx, syncTypeIdx)
+ r.dict = dict
+
+ typ := r.doTyp()
+ assert(typ != nil)
+
+ // See comment in pkgReader.typIdx explaining how this happens.
+ if prev := *where; prev != nil {
+ return prev
+ }
+
+ *where = typ
+ return typ
+}
+
+func (r *reader2) doTyp() (res types2.Type) {
+ switch tag := codeType(r.code(syncType)); tag {
+ default:
+ base.FatalfAt(src.NoXPos, "unhandled type tag: %v", tag)
+ panic("unreachable")
+
+ case typeBasic:
+ return types2.Typ[r.len()]
+
+ case typeNamed:
+ obj, targs := r.obj()
+ name := obj.(*types2.TypeName)
+ if len(targs) != 0 {
+ t, _ := types2.Instantiate(types2.NewEnvironment(r.p.check), name.Type(), targs, false)
+ return t
+ }
+ return name.Type()
+
+ case typeTypeParam:
+ return r.dict.tparams[r.len()]
+
+ case typeArray:
+ len := int64(r.uint64())
+ return types2.NewArray(r.typ(), len)
+ case typeChan:
+ dir := types2.ChanDir(r.len())
+ return types2.NewChan(dir, r.typ())
+ case typeMap:
+ return types2.NewMap(r.typ(), r.typ())
+ case typePointer:
+ return types2.NewPointer(r.typ())
+ case typeSignature:
+ return r.signature(nil)
+ case typeSlice:
+ return types2.NewSlice(r.typ())
+ case typeStruct:
+ return r.structType()
+ case typeInterface:
+ return r.interfaceType()
+ case typeUnion:
+ return r.unionType()
+ }
+}
+
+func (r *reader2) structType() *types2.Struct {
+ fields := make([]*types2.Var, r.len())
+ var tags []string
+ for i := range fields {
+ pos := r.pos()
+ pkg, name := r.selector()
+ ftyp := r.typ()
+ tag := r.string()
+ embedded := r.bool()
+
+ fields[i] = types2.NewField(pos, pkg, name, ftyp, embedded)
+ if tag != "" {
+ for len(tags) < i {
+ tags = append(tags, "")
+ }
+ tags = append(tags, tag)
+ }
+ }
+ return types2.NewStruct(fields, tags)
+}
+
+func (r *reader2) unionType() *types2.Union {
+ terms := make([]*types2.Term, r.len())
+ for i := range terms {
+ terms[i] = types2.NewTerm(r.bool(), r.typ())
+ }
+ return types2.NewUnion(terms)
+}
+
+func (r *reader2) interfaceType() *types2.Interface {
+ methods := make([]*types2.Func, r.len())
+ embeddeds := make([]types2.Type, r.len())
+
+ for i := range methods {
+ pos := r.pos()
+ pkg, name := r.selector()
+ mtyp := r.signature(nil)
+ methods[i] = types2.NewFunc(pos, pkg, name, mtyp)
+ }
+
+ for i := range embeddeds {
+ embeddeds[i] = r.typ()
+ }
+
+ return types2.NewInterfaceType(methods, embeddeds)
+}
+
+func (r *reader2) signature(recv *types2.Var) *types2.Signature {
+ r.sync(syncSignature)
+
+ params := r.params()
+ results := r.params()
+ variadic := r.bool()
+
+ return types2.NewSignature(recv, params, results, variadic)
+}
+
+func (r *reader2) params() *types2.Tuple {
+ r.sync(syncParams)
+ params := make([]*types2.Var, r.len())
+ for i := range params {
+ params[i] = r.param()
+ }
+ return types2.NewTuple(params...)
+}
+
+func (r *reader2) param() *types2.Var {
+ r.sync(syncParam)
+
+ pos := r.pos()
+ pkg, name := r.localIdent()
+ typ := r.typ()
+
+ return types2.NewParam(pos, pkg, name, typ)
+}
+
+// @@@ Objects
+
+func (r *reader2) obj() (types2.Object, []types2.Type) {
+ r.sync(syncObject)
+
+ assert(!r.bool())
+
+ pkg, name := r.p.objIdx(r.reloc(relocObj))
+ obj := pkg.Scope().Lookup(name)
+
+ targs := make([]types2.Type, r.len())
+ for i := range targs {
+ targs[i] = r.typ()
+ }
+
+ return obj, targs
+}
+
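+// objIdx reads the object at the given index and, unless it is only
+// a stub reference, lazily inserts it into its package's scope. It
+// returns the object's package and name.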
+func (pr *pkgReader2) objIdx(idx int) (*types2.Package, string) {
+ rname := pr.newReader(relocName, idx, syncObject1)
+
+ objPkg, objName := rname.qualifiedIdent()
+ assert(objName != "")
+
+ tag := codeObj(rname.code(syncCodeObj))
+
+ if tag == objStub {
+ assert(objPkg == nil)
+ return objPkg, objName
+ }
+
+ dict := pr.objDictIdx(idx)
+
+ r := pr.newReader(relocObj, idx, syncObject1)
+ r.dict = dict
+
+ objPkg.Scope().InsertLazy(objName, func() types2.Object {
+ switch tag {
+ default:
+ panic("weird")
+
+ case objAlias:
+ pos := r.pos()
+ typ := r.typ()
+ return types2.NewTypeName(pos, objPkg, objName, typ)
+
+ case objConst:
+ pos := r.pos()
+ typ, val := r.value()
+ return types2.NewConst(pos, objPkg, objName, typ, val)
+
+ case objFunc:
+ pos := r.pos()
+ tparams := r.typeParamNames()
+ sig := r.signature(nil)
+ sig.SetTParams(tparams)
+ return types2.NewFunc(pos, objPkg, objName, sig)
+
+ case objType:
+ pos := r.pos()
+
+ return types2.NewTypeNameLazy(pos, objPkg, objName, func(named *types2.Named) (tparams []*types2.TypeParam, underlying types2.Type, methods []*types2.Func) {
+ tparams = r.typeParamNames()
+
+ // TODO(mdempsky): Rewrite receiver types so the underlying is an
+ // Interface? The go/types importer does this (I think because
+ // unit tests expected that), but cmd/compile doesn't care
+ // about it, so maybe we can avoid worrying about that here.
+ underlying = r.typ().Underlying()
+
+ methods = make([]*types2.Func, r.len())
+ for i := range methods {
+ methods[i] = r.method()
+ }
+
+ return
+ })
+
+ case objVar:
+ pos := r.pos()
+ typ := r.typ()
+ return types2.NewVar(pos, objPkg, objName, typ)
+ }
+ })
+
+ return objPkg, objName
+}
+
+func (r *reader2) value() (types2.Type, constant.Value) {
+ r.sync(syncValue)
+ return r.typ(), r.rawValue()
+}
+
+func (pr *pkgReader2) objDictIdx(idx int) *reader2Dict {
+ r := pr.newReader(relocObjDict, idx, syncObject1)
+
+ var dict reader2Dict
+
+ if implicits := r.len(); implicits != 0 {
+ base.Fatalf("unexpected object with %v implicit type parameter(s)", implicits)
+ }
+
+ dict.bounds = make([]typeInfo, r.len())
+ for i := range dict.bounds {
+ dict.bounds[i] = r.typInfo()
+ }
+
+ dict.derived = make([]derivedInfo, r.len())
+ dict.derivedTypes = make([]types2.Type, len(dict.derived))
+ for i := range dict.derived {
+ dict.derived[i] = derivedInfo{r.reloc(relocType), r.bool()}
+ }
+
+ // function references follow, but reader2 doesn't need those
+
+ return &dict
+}
+
+func (r *reader2) typeParamNames() []*types2.TypeParam {
+ r.sync(syncTypeParamNames)
+
+ // Note: This code assumes it only processes objects without
+ // implicit type parameters. This is currently fine, because
+ // reader2 is only used to read in exported declarations, which are
+ // always package scoped.
+
+ if len(r.dict.bounds) == 0 {
+ return nil
+ }
+
+ // Careful: Type parameter lists may have cycles. To allow for this,
+ // we construct the type parameter list in two passes: first we
+ // create all the TypeNames and TypeParams, then we construct and
+ // set the bound type.
+
+ r.dict.tparams = make([]*types2.TypeParam, len(r.dict.bounds))
+ for i := range r.dict.bounds {
+ pos := r.pos()
+ pkg, name := r.localIdent()
+
+ tname := types2.NewTypeName(pos, pkg, name, nil)
+ r.dict.tparams[i] = r.p.check.NewTypeParam(tname, nil)
+ }
+
+ for i, bound := range r.dict.bounds {
+ r.dict.tparams[i].SetConstraint(r.p.typIdx(bound, r.dict))
+ }
+
+ return r.dict.tparams
+}
+
+func (r *reader2) method() *types2.Func {
+ r.sync(syncMethod)
+ pos := r.pos()
+ pkg, name := r.selector()
+
+ rparams := r.typeParamNames()
+ sig := r.signature(r.param())
+ sig.SetRParams(rparams)
+
+ _ = r.pos() // TODO(mdempsky): Remove; this is a hack for linker.go.
+ return types2.NewFunc(pos, pkg, name, sig)
+}
+
+func (r *reader2) qualifiedIdent() (*types2.Package, string) { return r.ident(syncSym) }
+func (r *reader2) localIdent() (*types2.Package, string) { return r.ident(syncLocalIdent) }
+func (r *reader2) selector() (*types2.Package, string) { return r.ident(syncSelector) }
+
+func (r *reader2) ident(marker syncMarker) (*types2.Package, string) {
+ r.sync(marker)
+ return r.pkg(), r.string()
+}
diff --git a/src/cmd/compile/internal/noder/reloc.go b/src/cmd/compile/internal/noder/reloc.go
new file mode 100644
index 0000000000..669a6182e6
--- /dev/null
+++ b/src/cmd/compile/internal/noder/reloc.go
@@ -0,0 +1,42 @@
+// UNREVIEWED
+
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+// A reloc indicates a particular section within a unified IR export.
+//
+// TODO(mdempsky): Rename to "section" or something similar?
+type reloc int
+
+// A relocEnt (relocation entry) is an entry in an atom's local
+// reference table.
+//
+// TODO(mdempsky): Rename this too.
+type relocEnt struct {
+ kind reloc
+ idx int
+}
+
+// Reserved indices within the meta relocation section.
+const (
+ publicRootIdx = 0
+ privateRootIdx = 1
+)
+
+const (
+ relocString reloc = iota
+ relocMeta
+ relocPosBase
+ relocPkg
+ relocName
+ relocType
+ relocObj
+ relocObjExt
+ relocObjDict
+ relocBody
+
+ numRelocs = iota
+)
diff --git a/src/cmd/compile/internal/noder/stencil.go b/src/cmd/compile/internal/noder/stencil.go
index 3ebc8dff6d..602e88c102 100644
--- a/src/cmd/compile/internal/noder/stencil.go
+++ b/src/cmd/compile/internal/noder/stencil.go
@@ -8,21 +8,32 @@
package noder
import (
- "bytes"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
+ "cmd/compile/internal/objw"
+ "cmd/compile/internal/reflectdata"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
+ "cmd/internal/obj"
"cmd/internal/src"
"fmt"
- "strings"
+ "go/constant"
)
-// For catching problems as we add more features
-// TODO(danscales): remove assertions or replace with base.FatalfAt()
+// Enable extra consistency checks.
+const doubleCheck = true
+
func assert(p bool) {
- if !p {
- panic("assertion failed")
+ base.Assert(p)
+}
+
+// Temporary - for outputting information on derived types, dictionaries, sub-dictionaries.
+// Turn off when running tests.
+var infoPrintMode = false
+
+func infoPrint(format string, a ...interface{}) {
+ if infoPrintMode {
+ fmt.Printf(format, a...)
}
}
@@ -32,7 +43,8 @@ func assert(p bool) {
// encountered already or new ones that are encountered during the stenciling
// process.
func (g *irgen) stencil() {
- g.target.Stencils = make(map[*types.Sym]*ir.Func)
+ g.instInfoMap = make(map[*types.Sym]*instInfo)
+ g.gfInfoMap = make(map[*types.Sym]*gfInfo)
// Instantiate the methods of instantiated generic types that we have seen so far.
g.instantiateMethods()
@@ -72,56 +84,138 @@ func (g *irgen) stencil() {
// instantiated function if it hasn't been created yet, and change
// to calling that function directly.
modified := false
- foundFuncInst := false
+ closureRequired := false
+ // declInfo will be non-nil exactly if we are scanning an instantiated function
+ declInfo := g.instInfoMap[decl.Sym()]
+
ir.Visit(decl, func(n ir.Node) {
if n.Op() == ir.OFUNCINST {
- // We found a function instantiation that is not
- // immediately called.
- foundFuncInst = true
+ // generic F, not immediately called
+ closureRequired = true
}
- if n.Op() != ir.OCALL || n.(*ir.CallExpr).X.Op() != ir.OFUNCINST {
- return
+ if (n.Op() == ir.OMETHEXPR || n.Op() == ir.OMETHVALUE) && len(deref(n.(*ir.SelectorExpr).X.Type()).RParams()) > 0 && !types.IsInterfaceMethod(n.(*ir.SelectorExpr).Selection.Type) {
+ // T.M or x.M, where T or x is generic, but not immediately
+ // called. Not necessary if the method selected is
+ // actually for an embedded interface field.
+ closureRequired = true
+ }
+ if n.Op() == ir.OCALL && n.(*ir.CallExpr).X.Op() == ir.OFUNCINST {
+ // We have found a function call using a generic function
+ // instantiation.
+ call := n.(*ir.CallExpr)
+ inst := call.X.(*ir.InstExpr)
+ nameNode, isMeth := g.getInstNameNode(inst)
+ targs := typecheck.TypesOf(inst.Targs)
+ st := g.getInstantiation(nameNode, targs, isMeth)
+ dictValue, usingSubdict := g.getDictOrSubdict(declInfo, n, nameNode, targs, isMeth)
+ if infoPrintMode {
+ dictkind := "Main dictionary"
+ if usingSubdict {
+ dictkind = "Sub-dictionary"
+ }
+ if inst.X.Op() == ir.OMETHVALUE {
+ fmt.Printf("%s in %v at generic method call: %v - %v\n", dictkind, decl, inst.X, call)
+ } else {
+ fmt.Printf("%s in %v at generic function call: %v - %v\n", dictkind, decl, inst.X, call)
+ }
+ }
+
+ // Transform the Call now, which changes OCALL to
+ // OCALLFUNC and does typecheckaste/assignconvfn. Do
+ // it before installing the instantiation, so we are
+ // checking against non-shape param types in
+ // typecheckaste.
+ transformCall(call)
+
+ // Replace the OFUNCINST with a direct reference to the
+ // new stenciled function
+ call.X = st.Nname
+ if inst.X.Op() == ir.OMETHVALUE {
+ // When we create an instantiation of a method
+ // call, we make it a function. So, move the
+ // receiver to be the first arg of the function
+ // call.
+ call.Args.Prepend(inst.X.(*ir.SelectorExpr).X)
+ }
+
+ // Add dictionary to argument list.
+ call.Args.Prepend(dictValue)
+ modified = true
+ }
+ if n.Op() == ir.OCALLMETH && n.(*ir.CallExpr).X.Op() == ir.ODOTMETH && len(deref(n.(*ir.CallExpr).X.Type().Recv().Type).RParams()) > 0 {
+ // Method call on a generic type, which was instantiated by stenciling.
+ // Method calls on explicitly instantiated types will have an OFUNCINST
+ // and are handled above.
+ call := n.(*ir.CallExpr)
+ meth := call.X.(*ir.SelectorExpr)
+ targs := deref(meth.Type().Recv().Type).RParams()
+
+ t := meth.X.Type()
+ baseSym := deref(t).OrigSym
+ baseType := baseSym.Def.(*ir.Name).Type()
+ var gf *ir.Name
+ for _, m := range baseType.Methods().Slice() {
+ if meth.Sel == m.Sym {
+ gf = m.Nname.(*ir.Name)
+ break
+ }
+ }
+
+ // Transform the Call now, which changes OCALL
+ // to OCALLFUNC and does typecheckaste/assignconvfn.
+ transformCall(call)
+
+ st := g.getInstantiation(gf, targs, true)
+ dictValue, usingSubdict := g.getDictOrSubdict(declInfo, n, gf, targs, true)
+ // We have to be using a subdictionary, since this is
+ // a generic method call.
+ assert(usingSubdict)
+
+ // Transform to a function call, by appending the
+ // dictionary and the receiver to the args.
+ call.SetOp(ir.OCALLFUNC)
+ call.X = st.Nname
+ call.Args.Prepend(dictValue, meth.X)
+ modified = true
}
- // We have found a function call using a generic function
- // instantiation.
- call := n.(*ir.CallExpr)
- inst := call.X.(*ir.InstExpr)
- st := g.getInstantiationForNode(inst)
- // Replace the OFUNCINST with a direct reference to the
- // new stenciled function
- call.X = st.Nname
- if inst.X.Op() == ir.OCALLPART {
- // When we create an instantiation of a method
- // call, we make it a function. So, move the
- // receiver to be the first arg of the function
- // call.
- withRecv := make([]ir.Node, len(call.Args)+1)
- dot := inst.X.(*ir.SelectorExpr)
- withRecv[0] = dot.X
- copy(withRecv[1:], call.Args)
- call.Args = withRecv
- }
- // Transform the Call now, which changes OCALL
- // to OCALLFUNC and does typecheckaste/assignconvfn.
- transformCall(call)
- modified = true
})
- // If we found an OFUNCINST without a corresponding call in the
- // above decl, then traverse the nodes of decl again (with
+ // If we found a reference to a generic instantiation that wasn't an
+ // immediate call, then traverse the nodes of decl again (with
// EditChildren rather than Visit), where we actually change the
- // OFUNCINST node to an ONAME for the instantiated function.
+ // reference to the instantiation to a closure that captures the
+ // dictionary, then does a direct call.
// EditChildren is more expensive than Visit, so we only do this
- // in the infrequent case of an OFUNCINSt without a corresponding
+ // in the infrequent case of an OFUNCINST without a corresponding
// call.
- if foundFuncInst {
+ if closureRequired {
+ modified = true
var edit func(ir.Node) ir.Node
+ var outer *ir.Func
+ if f, ok := decl.(*ir.Func); ok {
+ outer = f
+ }
edit = func(x ir.Node) ir.Node {
if x.Op() == ir.OFUNCINST {
- st := g.getInstantiationForNode(x.(*ir.InstExpr))
- return st.Nname
+ child := x.(*ir.InstExpr).X
+ if child.Op() == ir.OMETHEXPR || child.Op() == ir.OMETHVALUE {
+ // Call EditChildren on child (x.X),
+ // not x, so that we don't do
+ // buildClosure() on the
+ // METHEXPR/METHVALUE nodes as well.
+ ir.EditChildren(child, edit)
+ return g.buildClosure(outer, x)
+ }
}
ir.EditChildren(x, edit)
+ switch {
+ case x.Op() == ir.OFUNCINST:
+ return g.buildClosure(outer, x)
+ case (x.Op() == ir.OMETHEXPR || x.Op() == ir.OMETHVALUE) &&
+ len(deref(x.(*ir.SelectorExpr).X.Type()).RParams()) > 0 &&
+ !types.IsInterfaceMethod(x.(*ir.SelectorExpr).Selection.Type):
+ return g.buildClosure(outer, x)
+ }
return x
}
edit(decl)
@@ -137,104 +231,392 @@ func (g *irgen) stencil() {
g.instantiateMethods()
}
+ g.finalizeSyms()
+}
+
+// buildClosure makes a closure to implement x, a OFUNCINST or OMETHEXPR
+// of generic type. outer is the containing function (or nil if closure is
+// in a global assignment instead of a function).
+func (g *irgen) buildClosure(outer *ir.Func, x ir.Node) ir.Node {
+ pos := x.Pos()
+ var target *ir.Func // target instantiated function/method
+ var dictValue ir.Node // dictionary to use
+ var rcvrValue ir.Node // receiver, if a method value
+ typ := x.Type() // type of the closure
+ var outerInfo *instInfo
+ if outer != nil {
+ outerInfo = g.instInfoMap[outer.Sym()]
+ }
+ usingSubdict := false
+ valueMethod := false
+ if x.Op() == ir.OFUNCINST {
+ inst := x.(*ir.InstExpr)
+
+ // Type arguments we're instantiating with.
+ targs := typecheck.TypesOf(inst.Targs)
+
+ // Find the generic function/method.
+ var gf *ir.Name
+ if inst.X.Op() == ir.ONAME {
+ // Instantiating a generic function call.
+ gf = inst.X.(*ir.Name)
+ } else if inst.X.Op() == ir.OMETHVALUE {
+ // Instantiating a method value x.M.
+ se := inst.X.(*ir.SelectorExpr)
+ rcvrValue = se.X
+ gf = se.Selection.Nname.(*ir.Name)
+ } else {
+ panic("unhandled")
+ }
+
+ // target is the instantiated function we're trying to call.
+ // For functions, the target expects a dictionary as its first argument.
+ // For method values, the target expects a dictionary and the receiver
+ // as its first two arguments.
+ // dictValue is the value to use for the dictionary argument.
+ target = g.getInstantiation(gf, targs, rcvrValue != nil)
+ dictValue, usingSubdict = g.getDictOrSubdict(outerInfo, x, gf, targs, rcvrValue != nil)
+ if infoPrintMode {
+ dictkind := "Main dictionary"
+ if usingSubdict {
+ dictkind = "Sub-dictionary"
+ }
+ if rcvrValue == nil {
+ fmt.Printf("%s in %v for generic function value %v\n", dictkind, outer, inst.X)
+ } else {
+ fmt.Printf("%s in %v for generic method value %v\n", dictkind, outer, inst.X)
+ }
+ }
+ } else { // ir.OMETHEXPR or ir.METHVALUE
+ // Method expression T.M where T is a generic type.
+ se := x.(*ir.SelectorExpr)
+ targs := deref(se.X.Type()).RParams()
+ if len(targs) == 0 {
+ panic("bad")
+ }
+ if x.Op() == ir.OMETHVALUE {
+ rcvrValue = se.X
+ }
+
+ // se.X.Type() is the top-level type of the method expression. To
+ // correctly handle method expressions involving embedded fields,
+ // look up the generic method below using the type of the receiver
+ // of se.Selection, since that will be the type that actually has
+ // the method.
+ recv := deref(se.Selection.Type.Recv().Type)
+ if len(recv.RParams()) == 0 {
+ // The embedded type that actually has the method is not
+ // actually generic, so no need to build a closure.
+ return x
+ }
+ baseType := recv.OrigSym.Def.Type()
+ var gf *ir.Name
+ for _, m := range baseType.Methods().Slice() {
+ if se.Sel == m.Sym {
+ gf = m.Nname.(*ir.Name)
+ break
+ }
+ }
+ if !gf.Type().Recv().Type.IsPtr() {
+ // Remember if value method, so we can detect (*T).M case.
+ valueMethod = true
+ }
+ target = g.getInstantiation(gf, targs, true)
+ dictValue, usingSubdict = g.getDictOrSubdict(outerInfo, x, gf, targs, true)
+ if infoPrintMode {
+ dictkind := "Main dictionary"
+ if usingSubdict {
+ dictkind = "Sub-dictionary"
+ }
+ fmt.Printf("%s in %v for method expression %v\n", dictkind, outer, x)
+ }
+ }
+
+ // Build a closure to implement a function instantiation.
+ //
+ // func f[T any] (int, int) (int, int) { ...whatever... }
+ //
+ // Then any reference to f[int] not directly called gets rewritten to
+ //
+ // .dictN := ... dictionary to use ...
+ // func(a0, a1 int) (r0, r1 int) {
+ // return .inst.f[int](.dictN, a0, a1)
+ // }
+ //
+ // Similarly for method expressions,
+ //
+ // type g[T any] ....
+ // func (rcvr g[T]) f(a0, a1 int) (r0, r1 int) { ... }
+ //
+ // Any reference to g[int].f not directly called gets rewritten to
+ //
+ // .dictN := ... dictionary to use ...
+ // func(rcvr g[int], a0, a1 int) (r0, r1 int) {
+ // return .inst.g[int].f(.dictN, rcvr, a0, a1)
+ // }
+ //
+ // Also method values
+ //
+ // var x g[int]
+ //
+ // Any reference to x.f not directly called gets rewritten to
+ //
+ // .dictN := ... dictionary to use ...
+ // x2 := x
+ // func(a0, a1 int) (r0, r1 int) {
+ // return .inst.g[int].f(.dictN, x2, a0, a1)
+ // }
+
+ // Make a new internal function.
+ fn, formalParams, formalResults := startClosure(pos, outer, typ)
+
+ // This is the dictionary we want to use.
+ // It may be a constant, or it may be a dictionary acquired from the outer function's dictionary.
+ // For the latter, dictVar is a variable in the outer function's scope, set to the subdictionary
+ // read from the outer function's dictionary.
+ var dictVar *ir.Name
+ var dictAssign *ir.AssignStmt
+ if outer != nil {
+ // Note: for now this is a compile-time constant, so we don't really need a closure
+ // to capture it (a wrapper function would work just as well). But eventually it
+ // will be a read of a subdictionary from the parent dictionary.
+ dictVar = ir.NewNameAt(pos, typecheck.LookupNum(".dict", g.dnum))
+ g.dnum++
+ dictVar.Class = ir.PAUTO
+ typed(types.Types[types.TUINTPTR], dictVar)
+ dictVar.Curfn = outer
+ dictAssign = ir.NewAssignStmt(pos, dictVar, dictValue)
+ dictAssign.SetTypecheck(1)
+ dictVar.Defn = dictAssign
+ outer.Dcl = append(outer.Dcl, dictVar)
+ }
+ // If this is a method value, assign the receiver to a temporary.
+ var rcvrVar *ir.Name
+ var rcvrAssign ir.Node
+ if rcvrValue != nil {
+ rcvrVar = ir.NewNameAt(pos, typecheck.LookupNum(".rcvr", g.dnum))
+ g.dnum++
+ rcvrVar.Class = ir.PAUTO
+ typed(rcvrValue.Type(), rcvrVar)
+ rcvrVar.Curfn = outer
+ rcvrAssign = ir.NewAssignStmt(pos, rcvrVar, rcvrValue)
+ rcvrAssign.SetTypecheck(1)
+ rcvrVar.Defn = rcvrAssign
+ outer.Dcl = append(outer.Dcl, rcvrVar)
+ }
+
+ // Build body of closure. This involves just calling the wrapped function directly
+ // with the additional dictionary argument.
+
+ // First, figure out the dictionary argument.
+ var dict2Var ir.Node
+ if usingSubdict {
+ // Capture sub-dictionary calculated in the outer function
+ dict2Var = ir.CaptureName(pos, fn, dictVar)
+ typed(types.Types[types.TUINTPTR], dict2Var)
+ } else {
+ // Static dictionary, so can be used directly in the closure
+ dict2Var = dictValue
+ }
+ // Also capture the receiver variable.
+ var rcvr2Var *ir.Name
+ if rcvrValue != nil {
+ rcvr2Var = ir.CaptureName(pos, fn, rcvrVar)
+ }
+
+ // Build arguments to call inside the closure.
+ var args []ir.Node
+
+ // First the dictionary argument.
+ args = append(args, dict2Var)
+ // Then the receiver.
+ if rcvrValue != nil {
+ args = append(args, rcvr2Var)
+ }
+ // Then all the other arguments (including receiver for method expressions).
+ for i := 0; i < typ.NumParams(); i++ {
+ if x.Op() == ir.OMETHEXPR && i == 0 {
+ // If we are doing a method expression, we need to
+ // explicitly traverse any embedded fields in the receiver
+ // argument in order to call the method instantiation.
+ arg0 := formalParams[0].Nname.(ir.Node)
+ arg0 = typecheck.AddImplicitDots(ir.NewSelectorExpr(base.Pos, ir.OXDOT, arg0, x.(*ir.SelectorExpr).Sel)).X
+ if valueMethod && arg0.Type().IsPtr() {
+ // For handling the (*T).M case: if we have a pointer
+ // receiver after following all the embedded fields,
+ // but it's a value method, add a star operator.
+ arg0 = ir.NewStarExpr(arg0.Pos(), arg0)
+ }
+ args = append(args, arg0)
+ } else {
+ args = append(args, formalParams[i].Nname.(*ir.Name))
+ }
+ }
+
+ // Build call itself.
+ var innerCall ir.Node = ir.NewCallExpr(pos, ir.OCALL, target.Nname, args)
+ if len(formalResults) > 0 {
+ innerCall = ir.NewReturnStmt(pos, []ir.Node{innerCall})
+ }
+ // Finish building body of closure.
+ ir.CurFunc = fn
+ // TODO: set types directly here instead of using typecheck.Stmt
+ typecheck.Stmt(innerCall)
+ ir.CurFunc = nil
+ fn.Body = []ir.Node{innerCall}
+
+ // We're all done with the captured dictionary (and receiver, for method values).
+ ir.FinishCaptureNames(pos, outer, fn)
+
+ // Make a closure referencing our new internal function.
+ c := ir.UseClosure(fn.OClosure, g.target)
+ var init []ir.Node
+ if outer != nil {
+ init = append(init, dictAssign)
+ }
+ if rcvrValue != nil {
+ init = append(init, rcvrAssign)
+ }
+ return ir.InitExpr(init, c)
}
-// instantiateMethods instantiates all the methods of all fully-instantiated
-// generic types that have been added to g.instTypeList.
+// instantiateMethods instantiates all the methods (and associated dictionaries) of
+// all fully-instantiated generic types that have been added to g.instTypeList.
func (g *irgen) instantiateMethods() {
for i := 0; i < len(g.instTypeList); i++ {
typ := g.instTypeList[i]
- // Get the base generic type by looking up the symbol of the
- // generic (uninstantiated) name.
- baseSym := typ.Sym().Pkg.Lookup(genericTypeName(typ.Sym()))
+ assert(!typ.HasShape())
+ // Mark runtime type as needed, since this ensures that the
+ // compiler puts out the needed DWARF symbols, when this
+ // instantiated type has a different package from the local
+ // package.
+ typecheck.NeedRuntimeType(typ)
+ // Lookup the method on the base generic type, since methods may
+ // not be set on imported instantiated types.
+ baseSym := typ.OrigSym
baseType := baseSym.Def.(*ir.Name).Type()
- for j, m := range typ.Methods().Slice() {
- name := m.Nname.(*ir.Name)
- targs := make([]ir.Node, len(typ.RParams()))
- for k, targ := range typ.RParams() {
- targs[k] = ir.TypeNode(targ)
+ for j, _ := range typ.Methods().Slice() {
+ if baseType.Methods().Slice()[j].Nointerface() {
+ typ.Methods().Slice()[j].SetNointerface(true)
}
baseNname := baseType.Methods().Slice()[j].Nname.(*ir.Name)
- name.Func = g.getInstantiation(baseNname, targs, true)
+ // Eagerly generate the instantiations and dictionaries that implement these methods.
+ // We don't use the instantiations here, just generate them (and any
+ // further instantiations those generate, etc.).
+ // Note that we don't set the Func for any methods on instantiated
+ // types. Their signatures don't match so that would be confusing.
+ // Direct method calls go directly to the instantiations, implemented above.
+ // Indirect method calls use wrappers generated in reflectcall. Those wrappers
+ // will use these instantiations if they are needed (for interface tables or reflection).
+ _ = g.getInstantiation(baseNname, typ.RParams(), true)
+ _ = g.getDictionarySym(baseNname, typ.RParams(), true)
}
}
g.instTypeList = nil
}
-// genericSym returns the name of the base generic type for the type named by
-// sym. It simply returns the name obtained by removing everything after the
-// first bracket ("[").
-func genericTypeName(sym *types.Sym) string {
- return sym.Name[0:strings.Index(sym.Name, "[")]
-}
-
-// getInstantiationForNode returns the function/method instantiation for a
-// InstExpr node inst.
-func (g *irgen) getInstantiationForNode(inst *ir.InstExpr) *ir.Func {
+// getInstNameNode returns the name node for the method or function being instantiated, and a bool which is true if a method is being instantiated.
+func (g *irgen) getInstNameNode(inst *ir.InstExpr) (*ir.Name, bool) {
if meth, ok := inst.X.(*ir.SelectorExpr); ok {
- return g.getInstantiation(meth.Selection.Nname.(*ir.Name), inst.Targs, true)
+ return meth.Selection.Nname.(*ir.Name), true
} else {
- return g.getInstantiation(inst.X.(*ir.Name), inst.Targs, false)
+ return inst.X.(*ir.Name), false
}
}
-// getInstantiation gets the instantiantion of the function or method nameNode
-// with the type arguments targs. If the instantiated function is not already
-// cached, then it calls genericSubst to create the new instantiation.
-func (g *irgen) getInstantiation(nameNode *ir.Name, targs []ir.Node, isMeth bool) *ir.Func {
- sym := makeInstName(nameNode.Sym(), targs, isMeth)
- st := g.target.Stencils[sym]
- if st == nil {
- // If instantiation doesn't exist yet, create it and add
- // to the list of decls.
- st = g.genericSubst(sym, nameNode, targs, isMeth)
- g.target.Stencils[sym] = st
- g.target.Decls = append(g.target.Decls, st)
- if base.Flag.W > 1 {
- ir.Dump(fmt.Sprintf("\nstenciled %v", st), st)
+// getDictOrSubdict returns, for a method/function call or reference (node n) in an
+// instantiation (described by instInfo), a node which is accessing a sub-dictionary
+// or main/static dictionary, as needed, and also returns a boolean indicating if a
+// sub-dictionary was accessed. nameNode is the particular function or method being
+// called/referenced, and targs are the type arguments.
+func (g *irgen) getDictOrSubdict(declInfo *instInfo, n ir.Node, nameNode *ir.Name, targs []*types.Type, isMeth bool) (ir.Node, bool) {
+ var dict ir.Node
+ usingSubdict := false
+ if declInfo != nil {
+ // Get the dictionary arg via sub-dictionary reference
+ entry, ok := declInfo.dictEntryMap[n]
+ // If the entry is not found, it may be that this node did not have
+ // any type args that depend on type params, so we need a main
+ // dictionary, not a sub-dictionary.
+ if ok {
+ dict = getDictionaryEntry(n.Pos(), declInfo.dictParam, entry, declInfo.dictLen)
+ usingSubdict = true
}
}
- return st
+ if !usingSubdict {
+ dict = g.getDictionaryValue(nameNode, targs, isMeth)
+ }
+ return dict, usingSubdict
}
-// makeInstName makes the unique name for a stenciled generic function or method,
-// based on the name of the function fy=nsym and the targs. It replaces any
-// existing bracket type list in the name. makeInstName asserts that fnsym has
-// brackets in its name if and only if hasBrackets is true.
-// TODO(danscales): remove the assertions and the hasBrackets argument later.
-//
-// Names of declared generic functions have no brackets originally, so hasBrackets
-// should be false. Names of generic methods already have brackets, since the new
-// type parameter is specified in the generic type of the receiver (e.g. func
-// (func (v *value[T]).set(...) { ... } has the original name (*value[T]).set.
-//
-// The standard naming is something like: 'genFn[int,bool]' for functions and
-// '(*genType[int,bool]).methodName' for methods
-func makeInstName(fnsym *types.Sym, targs []ir.Node, hasBrackets bool) *types.Sym {
- b := bytes.NewBufferString("")
- name := fnsym.Name
- i := strings.Index(name, "[")
- assert(hasBrackets == (i >= 0))
- if i >= 0 {
- b.WriteString(name[0:i])
- } else {
- b.WriteString(name)
+// checkFetchBody checks if a generic body can be fetched, but hasn't been loaded
+// yet. If so, it imports the body.
+func checkFetchBody(nameNode *ir.Name) {
+ if nameNode.Func.Body == nil && nameNode.Func.Inl != nil {
+ // If there is no body yet but Func.Inl exists, then we can
+ // import the whole generic body.
+ assert(nameNode.Func.Inl.Cost == 1 && nameNode.Sym().Pkg != types.LocalPkg)
+ typecheck.ImportBody(nameNode.Func)
+ assert(nameNode.Func.Inl.Body != nil)
+ nameNode.Func.Body = nameNode.Func.Inl.Body
+ nameNode.Func.Dcl = nameNode.Func.Inl.Dcl
}
- b.WriteString("[")
- for i, targ := range targs {
- if i > 0 {
- b.WriteString(",")
+}
+
+// getInstantiation gets the instantiation and dictionary of the function or method nameNode
+// with the type arguments shapes. If the instantiated function is not already
+// cached, then it calls genericSubst to create the new instantiation.
+func (g *irgen) getInstantiation(nameNode *ir.Name, shapes []*types.Type, isMeth bool) *ir.Func {
+ checkFetchBody(nameNode)
+
+ // Convert any non-shape type arguments to their shape, so we can reduce the
+ // number of instantiations we have to generate. You can actually have a mix
+ // of shape and non-shape arguments, because of inferred or explicitly
+ // specified concrete type args.
+ var s1 []*types.Type
+ for i, t := range shapes {
+ if !t.HasShape() {
+ if s1 == nil {
+ s1 = make([]*types.Type, len(shapes))
+ copy(s1[0:i], shapes[0:i])
+ }
+ s1[i] = typecheck.Shapify(t)
+ } else if s1 != nil {
+ s1[i] = shapes[i]
}
- b.WriteString(targ.Type().String())
}
- b.WriteString("]")
- if i >= 0 {
- i2 := strings.Index(name[i:], "]")
- assert(i2 >= 0)
- b.WriteString(name[i+i2+1:])
+ if s1 != nil {
+ shapes = s1
}
- return typecheck.Lookup(b.String())
+
+ sym := typecheck.MakeFuncInstSym(nameNode.Sym(), shapes, isMeth)
+ info := g.instInfoMap[sym]
+ if info == nil {
+ // If instantiation doesn't exist yet, create it and add
+ // to the list of decls.
+ gfInfo := g.getGfInfo(nameNode)
+ info = &instInfo{
+ gf: nameNode,
+ gfInfo: gfInfo,
+ startSubDict: len(shapes) + len(gfInfo.derivedTypes),
+ startItabConv: len(shapes) + len(gfInfo.derivedTypes) + len(gfInfo.subDictCalls),
+ dictLen: len(shapes) + len(gfInfo.derivedTypes) + len(gfInfo.subDictCalls) + len(gfInfo.itabConvs),
+ dictEntryMap: make(map[ir.Node]int),
+ }
+ // genericSubst fills in info.dictParam and info.dictEntryMap.
+ st := g.genericSubst(sym, nameNode, shapes, isMeth, info)
+ info.fun = st
+ g.instInfoMap[sym] = info
+ // This ensures that the linker drops duplicates of this instantiation.
+ // All just works!
+ st.SetDupok(true)
+ g.target.Decls = append(g.target.Decls, st)
+ if base.Flag.W > 1 {
+ ir.Dump(fmt.Sprintf("\nstenciled %v", st), st)
+ }
+ }
+ return info.fun
}
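
As a worked example of the slot bookkeeping above (counts are hypothetical): with 2 shape arguments, 3 derived types, 4 sub-dictionary calls and 2 itab conversions, the sub-dictionaries start at slot 5, the itab entries at slot 9, and the dictionary holds 11 uintptr slots in total. A minimal sketch of that arithmetic:

package main

import "fmt"

func main() {
	shapes, derivedTypes, subDictCalls, itabConvs := 2, 3, 4, 2
	startSubDict := shapes + derivedTypes        // 5: first sub-dictionary slot
	startItabConv := startSubDict + subDictCalls // 9: first itab slot
	dictLen := startItabConv + itabConvs         // 11: total uintptr slots
	fmt.Println(startSubDict, startItabConv, dictLen)
}
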
// Struct containing info needed for doing the substitution as we create the
@@ -243,32 +625,30 @@ type subster struct {
g *irgen
isMethod bool // If a method is being instantiated
newf *ir.Func // Func node for the new stenciled function
- tparams []*types.Field
- targs []ir.Node
- // The substitution map from name nodes in the generic function to the
- // name nodes in the new stenciled function.
- vars map[*ir.Name]*ir.Name
+ ts typecheck.Tsubster
+ info *instInfo // Place to put extra info in the instantiation
}
// genericSubst returns a new function with name newsym. The function is an
// instantiation of a generic function or method specified by namedNode with type
-// args targs. For a method with a generic receiver, it returns an instantiated
-// function type where the receiver becomes the first parameter. Otherwise the
-// instantiated method would still need to be transformed by later compiler
-// phases.
-func (g *irgen) genericSubst(newsym *types.Sym, nameNode *ir.Name, targs []ir.Node, isMethod bool) *ir.Func {
- var tparams []*types.Field
+// args shapes. For a method with a generic receiver, it returns an instantiated
+// function type where the receiver becomes the first parameter. For either a generic
+// method or function, a dictionary parameter is added as the very first
+// parameter. genericSubst fills in info.dictParam and info.dictEntryMap.
+func (g *irgen) genericSubst(newsym *types.Sym, nameNode *ir.Name, shapes []*types.Type, isMethod bool, info *instInfo) *ir.Func {
+ var tparams []*types.Type
if isMethod {
// Get the type params from the method receiver (after skipping
// over any pointer)
recvType := nameNode.Type().Recv().Type
recvType = deref(recvType)
- tparams = make([]*types.Field, len(recvType.RParams()))
- for i, rparam := range recvType.RParams() {
- tparams[i] = types.NewField(src.NoXPos, nil, rparam)
- }
+ tparams = recvType.RParams()
} else {
- tparams = nameNode.Type().TParams().Fields().Slice()
+ fields := nameNode.Type().TParams().Fields().Slice()
+ tparams = make([]*types.Type, len(fields))
+ for i, f := range fields {
+ tparams[i] = f.Type
+ }
}
gf := nameNode.Func
// Pos of the instantiated function is same as the generic function
@@ -283,78 +663,214 @@ func (g *irgen) genericSubst(newsym *types.Sym, nameNode *ir.Name, targs []ir.No
// depend on ir.CurFunc being set.
ir.CurFunc = newf
- assert(len(tparams) == len(targs))
+ assert(len(tparams) == len(shapes))
subst := &subster{
g: g,
isMethod: isMethod,
newf: newf,
- tparams: tparams,
- targs: targs,
- vars: make(map[*ir.Name]*ir.Name),
+ info: info,
+ ts: typecheck.Tsubster{
+ Tparams: tparams,
+ Targs: shapes,
+ Vars: make(map[*ir.Name]*ir.Name),
+ },
}
- newf.Dcl = make([]*ir.Name, len(gf.Dcl))
- for i, n := range gf.Dcl {
- newf.Dcl[i] = subst.node(n).(*ir.Name)
+ newf.Dcl = make([]*ir.Name, 0, len(gf.Dcl)+1)
+
+ // Create the needed dictionary param
+ dictionarySym := newsym.Pkg.Lookup(".dict")
+ dictionaryType := types.Types[types.TUINTPTR]
+ dictionaryName := ir.NewNameAt(gf.Pos(), dictionarySym)
+ typed(dictionaryType, dictionaryName)
+ dictionaryName.Class = ir.PPARAM
+ dictionaryName.Curfn = newf
+ newf.Dcl = append(newf.Dcl, dictionaryName)
+ for _, n := range gf.Dcl {
+ if n.Sym().Name == ".dict" {
+ panic("already has dictionary")
+ }
+ newf.Dcl = append(newf.Dcl, subst.localvar(n))
}
+ dictionaryArg := types.NewField(gf.Pos(), dictionarySym, dictionaryType)
+ dictionaryArg.Nname = dictionaryName
+ info.dictParam = dictionaryName
- // Ugly: we have to insert the Name nodes of the parameters/results into
+ // We add the dictionary as the first parameter in the function signature.
+ // We also transform a method type to the corresponding function type
+ // (make the receiver be the next parameter after the dictionary).
+ oldt := nameNode.Type()
+ var args []*types.Field
+ args = append(args, dictionaryArg)
+ args = append(args, oldt.Recvs().FieldSlice()...)
+ args = append(args, oldt.Params().FieldSlice()...)
+
+ // Replace the types in the function signature via subst.fields.
+ // Ugly: also, we have to insert the Name nodes of the parameters/results into
// the function type. The current function type has no Nname fields set,
// because it came via conversion from the types2 type.
- oldt := nameNode.Type()
- // We also transform a generic method type to the corresponding
- // instantiated function type where the receiver is the first parameter.
newt := types.NewSignature(oldt.Pkg(), nil, nil,
- subst.fields(ir.PPARAM, append(oldt.Recvs().FieldSlice(), oldt.Params().FieldSlice()...), newf.Dcl),
+ subst.fields(ir.PPARAM, args, newf.Dcl),
subst.fields(ir.PPARAMOUT, oldt.Results().FieldSlice(), newf.Dcl))
- newf.Nname.SetType(newt)
+ typed(newt, newf.Nname)
ir.MarkFunc(newf.Nname)
newf.SetTypecheck(1)
- newf.Nname.SetTypecheck(1)
// Make sure name/type of newf is set before substituting the body.
newf.Body = subst.list(gf.Body)
+
+ // Add code to check that the dictionary is correct.
+ // TODO: must be adjusted to deal with shapes, but will go away soon when we move
+ // to many->1 shape to concrete mapping.
+ // newf.Body.Prepend(subst.checkDictionary(dictionaryName, shapes)...)
+
ir.CurFunc = savef
+ // Add any new, fully instantiated types seen during the substitution to
+ // g.instTypeList.
+ g.instTypeList = append(g.instTypeList, subst.ts.InstTypeList...)
+
+ if doubleCheck {
+ okConvs := map[ir.Node]bool{}
+ ir.Visit(newf, func(n ir.Node) {
+ if n.Op() == ir.OIDATA {
+ // IDATA(OCONVIFACE(x)) is ok, as we don't use the type of x.
+ // TODO: use some other op besides OCONVIFACE. ONEW might work
+ // (with appropriate direct vs. indirect interface cases).
+ okConvs[n.(*ir.UnaryExpr).X] = true
+ }
+ if n.Op() == ir.OCONVIFACE && !okConvs[n] {
+ c := n.(*ir.ConvExpr)
+ if c.X.Type().HasShape() {
+ ir.Dump("BAD FUNCTION", newf)
+ ir.Dump("BAD CONVERSION", c)
+ base.Fatalf("converting shape type to interface")
+ }
+ }
+ })
+ }
return newf
}
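
In user-level terms, the signature rewrite above turns a generic method into a plain function that takes the dictionary first and the receiver second. A hand-written analogue (the names and the concrete int instantiation are assumptions for illustration only):

package main

type Value[T any] struct{ v T }

// Get is the generic method being stenciled.
func (r *Value[T]) Get() T { return r.v }

// getStenciled approximates the shape of the instantiation genericSubst builds:
// dictionary parameter first, receiver next, then the original parameters.
func getStenciled(dict uintptr, r *Value[int]) int { return r.v }

func main() {
	v := &Value[int]{v: 7}
	println(v.Get(), getStenciled(0, v)) // 7 7
}
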
-// node is like DeepCopy(), but creates distinct ONAME nodes, and also descends
-// into closures. It substitutes type arguments for type parameters in all the new
-// nodes.
+// localvar creates a new name node for the specified local variable and enters it
+// in subst.ts.Vars. It substitutes type arguments for type parameters in the type of
+// name as needed.
+func (subst *subster) localvar(name *ir.Name) *ir.Name {
+ m := ir.NewNameAt(name.Pos(), name.Sym())
+ if name.IsClosureVar() {
+ m.SetIsClosureVar(true)
+ }
+ m.SetType(subst.ts.Typ(name.Type()))
+ m.BuiltinOp = name.BuiltinOp
+ m.Curfn = subst.newf
+ m.Class = name.Class
+ assert(name.Class != ir.PEXTERN && name.Class != ir.PFUNC)
+ m.Func = name.Func
+ subst.ts.Vars[name] = m
+ m.SetTypecheck(1)
+ return m
+}
+
+// checkDictionary returns code that does runtime consistency checks
+// between the dictionary and the types it should contain.
+func (subst *subster) checkDictionary(name *ir.Name, targs []*types.Type) (code []ir.Node) {
+ if false {
+ return // checking turned off
+ }
+ // TODO: when moving to GCshape, this test will become harder. Call into
+ // runtime to check the expected shape is correct?
+ pos := name.Pos()
+ // Convert dictionary to *[N]uintptr
+ d := ir.NewConvExpr(pos, ir.OCONVNOP, types.Types[types.TUNSAFEPTR], name)
+ d.SetTypecheck(1)
+ d = ir.NewConvExpr(pos, ir.OCONVNOP, types.NewArray(types.Types[types.TUINTPTR], int64(len(targs))).PtrTo(), d)
+ d.SetTypecheck(1)
+ types.CheckSize(d.Type().Elem())
+
+ // Check that each type entry in the dictionary is correct.
+ for i, t := range targs {
+ if t.HasShape() {
+ // Check the concrete type, not the shape type.
+ base.Fatalf("shape type in dictionary %s %+v\n", name.Sym().Name, t)
+ }
+ want := reflectdata.TypePtr(t)
+ typed(types.Types[types.TUINTPTR], want)
+ deref := ir.NewStarExpr(pos, d)
+ typed(d.Type().Elem(), deref)
+ idx := ir.NewConstExpr(constant.MakeUint64(uint64(i)), name) // TODO: what to set orig to?
+ typed(types.Types[types.TUINTPTR], idx)
+ got := ir.NewIndexExpr(pos, deref, idx)
+ typed(types.Types[types.TUINTPTR], got)
+ cond := ir.NewBinaryExpr(pos, ir.ONE, want, got)
+ typed(types.Types[types.TBOOL], cond)
+ panicArg := ir.NewNilExpr(pos)
+ typed(types.NewInterface(types.LocalPkg, nil), panicArg)
+ then := ir.NewUnaryExpr(pos, ir.OPANIC, panicArg)
+ then.SetTypecheck(1)
+ x := ir.NewIfStmt(pos, cond, []ir.Node{then}, nil)
+ x.SetTypecheck(1)
+ code = append(code, x)
+ }
+ return
+}
+
+// getDictionaryEntry gets the i'th entry in the dictionary dict.
+func getDictionaryEntry(pos src.XPos, dict *ir.Name, i int, size int) ir.Node {
+ // Convert dictionary to *[N]uintptr
+ // All entries in the dictionary are pointers. They all point to static data, though, so we
+ // treat them as uintptrs so the GC doesn't need to keep track of them.
+ d := ir.NewConvExpr(pos, ir.OCONVNOP, types.Types[types.TUNSAFEPTR], dict)
+ d.SetTypecheck(1)
+ d = ir.NewConvExpr(pos, ir.OCONVNOP, types.NewArray(types.Types[types.TUINTPTR], int64(size)).PtrTo(), d)
+ d.SetTypecheck(1)
+ types.CheckSize(d.Type().Elem())
+
+ // Load entry i out of the dictionary.
+ deref := ir.NewStarExpr(pos, d)
+ typed(d.Type().Elem(), deref)
+ idx := ir.NewConstExpr(constant.MakeUint64(uint64(i)), dict) // TODO: what to set orig to?
+ typed(types.Types[types.TUINTPTR], idx)
+ r := ir.NewIndexExpr(pos, deref, idx)
+ typed(types.Types[types.TUINTPTR], r)
+ return r
+}
+
+// getDictionaryType returns a *runtime._type from the dictionary entry i (which
+// refers to a type param or a derived type that uses type params). It uses the
+// specified dictionary dictParam, rather than the one in info.dictParam.
+func getDictionaryType(info *instInfo, dictParam *ir.Name, pos src.XPos, i int) ir.Node {
+ if i < 0 || i >= info.startSubDict {
+ base.Fatalf(fmt.Sprintf("bad dict index %d", i))
+ }
+
+ r := getDictionaryEntry(pos, info.dictParam, i, info.startSubDict)
+ // change type of retrieved dictionary entry to *byte, which is the
+ // standard typing of a *runtime._type in the compiler
+ typed(types.Types[types.TUINT8].PtrTo(), r)
+ return r
+}
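
The IR built by getDictionaryEntry amounts to indexing the dictionary as an array of uintptrs. A user-level sketch (the helper name and the fixed array length are assumptions; the compiler uses the exact dictionary size):

package main

import (
	"fmt"
	"unsafe"
)

// entry mimics getDictionaryEntry: reinterpret the dictionary pointer as a
// pointer to an array of uintptrs and load slot i. Entries point at static
// data, so treating them as uintptrs keeps the GC from tracking them.
func entry(dict unsafe.Pointer, i int) uintptr {
	const size = 4 // assumed dictionary length for this sketch
	return (*[size]uintptr)(dict)[i]
}

func main() {
	d := [4]uintptr{100, 200, 300, 400} // stand-in for a static dictionary
	fmt.Println(entry(unsafe.Pointer(&d), 2)) // 300
}
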
+
+// node is like DeepCopy(), but substitutes ONAME nodes based on subst.ts.Vars, and
+// also descends into closures. It substitutes type arguments for type parameters
+// in all the new nodes.
func (subst *subster) node(n ir.Node) ir.Node {
// Use closure to capture all state needed by the ir.EditChildren argument.
var edit func(ir.Node) ir.Node
edit = func(x ir.Node) ir.Node {
switch x.Op() {
case ir.OTYPE:
- return ir.TypeNode(subst.typ(x.Type()))
+ return ir.TypeNode(subst.ts.Typ(x.Type()))
case ir.ONAME:
- name := x.(*ir.Name)
- if v := subst.vars[name]; v != nil {
+ if v := subst.ts.Vars[x.(*ir.Name)]; v != nil {
return v
}
- m := ir.NewNameAt(name.Pos(), name.Sym())
- if name.IsClosureVar() {
- m.SetIsClosureVar(true)
- }
- t := x.Type()
- if t == nil {
- assert(name.BuiltinOp != 0)
- } else {
- newt := subst.typ(t)
- m.SetType(newt)
- }
- m.BuiltinOp = name.BuiltinOp
- m.Curfn = subst.newf
- m.Class = name.Class
- m.Func = name.Func
- subst.vars[name] = m
- m.SetTypecheck(1)
- return m
+ return x
+ case ir.ONONAME:
+ // This handles the identifier in a type switch guard
+ fallthrough
case ir.OLITERAL, ir.ONIL:
if x.Sym() != nil {
return x
@@ -369,55 +885,66 @@ func (subst *subster) node(n ir.Node) ir.Node {
// an error.
_, isCallExpr := m.(*ir.CallExpr)
_, isStructKeyExpr := m.(*ir.StructKeyExpr)
- if !isCallExpr && !isStructKeyExpr && x.Op() != ir.OPANIC &&
+ _, isKeyExpr := m.(*ir.KeyExpr)
+ if !isCallExpr && !isStructKeyExpr && !isKeyExpr && x.Op() != ir.OPANIC &&
x.Op() != ir.OCLOSE {
base.Fatalf(fmt.Sprintf("Nil type for %v", x))
}
} else if x.Op() != ir.OCLOSURE {
- m.SetType(subst.typ(x.Type()))
+ m.SetType(subst.ts.Typ(x.Type()))
}
}
- ir.EditChildren(m, edit)
-
- if x.Typecheck() == 3 {
- // These are nodes whose transforms were delayed until
- // their instantiated type was known.
- m.SetTypecheck(1)
- if typecheck.IsCmp(x.Op()) {
- transformCompare(m.(*ir.BinaryExpr))
- } else {
- switch x.Op() {
- case ir.OSLICE, ir.OSLICE3:
- transformSlice(m.(*ir.SliceExpr))
-
- case ir.OADD:
- m = transformAdd(m.(*ir.BinaryExpr))
- case ir.OINDEX:
- transformIndex(m.(*ir.IndexExpr))
+ for i, de := range subst.info.gfInfo.subDictCalls {
+ if de == x {
+ // Remember the dictionary entry associated with this
+ // node in the instantiated function
+ // TODO: make sure this remains correct with respect to the
+ // transformations below.
+ subst.info.dictEntryMap[m] = subst.info.startSubDict + i
+ break
+ }
+ }
- case ir.OAS2:
- as2 := m.(*ir.AssignListStmt)
- transformAssign(as2, as2.Lhs, as2.Rhs)
+ ir.EditChildren(m, edit)
- case ir.OAS:
- as := m.(*ir.AssignStmt)
+ m.SetTypecheck(1)
+ if typecheck.IsCmp(x.Op()) {
+ transformCompare(m.(*ir.BinaryExpr))
+ } else {
+ switch x.Op() {
+ case ir.OSLICE, ir.OSLICE3:
+ transformSlice(m.(*ir.SliceExpr))
+
+ case ir.OADD:
+ m = transformAdd(m.(*ir.BinaryExpr))
+
+ case ir.OINDEX:
+ transformIndex(m.(*ir.IndexExpr))
+
+ case ir.OAS2:
+ as2 := m.(*ir.AssignListStmt)
+ transformAssign(as2, as2.Lhs, as2.Rhs)
+
+ case ir.OAS:
+ as := m.(*ir.AssignStmt)
+ if as.Y != nil {
+ // transformAssign doesn't handle the case
+ // of zeroing assignment of a dcl (rhs[0] is nil).
lhs, rhs := []ir.Node{as.X}, []ir.Node{as.Y}
transformAssign(as, lhs, rhs)
+ }
- case ir.OASOP:
- as := m.(*ir.AssignOpStmt)
- transformCheckAssign(as, as.X)
+ case ir.OASOP:
+ as := m.(*ir.AssignOpStmt)
+ transformCheckAssign(as, as.X)
- case ir.ORETURN:
- transformReturn(m.(*ir.ReturnStmt))
+ case ir.ORETURN:
+ transformReturn(m.(*ir.ReturnStmt))
- case ir.OSEND:
- transformSend(m.(*ir.SendStmt))
+ case ir.OSEND:
+ transformSend(m.(*ir.SendStmt))
- default:
- base.Fatalf("Unexpected node with Typecheck() == 3")
- }
}
}
@@ -445,11 +972,40 @@ func (subst *subster) node(n ir.Node) ir.Node {
// instantiated receiver type. We need to do this now,
// since the access/selection to the method for the real
// type is very different from the selection for the type
- // param. m will be transformed to an OCALLPART node. It
+ // param. m will be transformed to an OMETHVALUE node. It
// will be transformed to an ODOTMETH or ODOTINTER node if
// we find in the OCALL case below that the method value
// is actually called.
- transformDot(m.(*ir.SelectorExpr), false)
+ mse := m.(*ir.SelectorExpr)
+ if src := mse.X.Type(); src.IsShape() {
+ // The only dots on a shape type value are methods.
+ if mse.X.Op() == ir.OTYPE {
+ // Method expression T.M
+ m = subst.g.buildClosure2(subst, m, x)
+ // No need for transformDot - buildClosure2 has already
+ // transformed to OCALLINTER/ODOTINTER.
+ } else {
+ // Implement x.M as a conversion-to-bound-interface
+ // 1) convert x to the bound interface
+ // 2) call M on that interface
+ gsrc := x.(*ir.SelectorExpr).X.Type()
+ bound := gsrc.Bound()
+ dst := bound
+ if dst.HasTParam() {
+ dst = subst.ts.Typ(dst)
+ }
+ if src.IsInterface() {
+ // If type arg is an interface (unusual case),
+ // we do a type assert to the type bound.
+ mse.X = assertToBound(subst.info, subst.info.dictParam, m.Pos(), mse.X, bound, dst)
+ } else {
+ mse.X = convertUsingDictionary(subst.info, subst.info.dictParam, m.Pos(), mse.X, x, dst, gsrc)
+ }
+ transformDot(mse, false)
+ }
+ } else {
+ transformDot(mse, false)
+ }
m.SetTypecheck(1)
case ir.OCALL:
@@ -458,9 +1014,11 @@ func (subst *subster) node(n ir.Node) ir.Node {
case ir.OTYPE:
// Transform the conversion, now that we know the
// type argument.
- m = transformConvCall(m.(*ir.CallExpr))
+ m = transformConvCall(call)
+ // CONVIFACE transformation was already done in node2
+ assert(m.Op() != ir.OCONVIFACE)
- case ir.OCALLPART:
+ case ir.OMETHVALUE, ir.OMETHEXPR:
// Redo the transformation of OXDOT, now that we
// know the method value is being called. Then
// transform the call.
@@ -479,7 +1037,7 @@ func (subst *subster) node(n ir.Node) ir.Node {
name := call.X.Name()
if name.BuiltinOp != ir.OXXX {
switch name.BuiltinOp {
- case ir.OMAKE, ir.OREAL, ir.OIMAG, ir.OLEN, ir.OCAP, ir.OAPPEND:
+ case ir.OMAKE, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.ODELETE, ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
// Transform these builtins now that we
// know the type of the args.
m = transformBuiltin(call)
@@ -506,41 +1064,142 @@ func (subst *subster) node(n ir.Node) ir.Node {
}
case ir.OCLOSURE:
+ // We're going to create a new closure from scratch, so clear m
+ // to avoid using the ir.Copy result by accident until we reassign it.
+ m = nil
+
x := x.(*ir.ClosureExpr)
// Need to duplicate x.Func.Nname, x.Func.Dcl, x.Func.ClosureVars, and
// x.Func.Body.
oldfn := x.Func
- newfn := ir.NewFunc(oldfn.Pos())
- if oldfn.ClosureCalled() {
- newfn.SetClosureCalled(true)
- }
- newfn.SetIsHiddenClosure(true)
- m.(*ir.ClosureExpr).Func = newfn
- // Closure name can already have brackets, if it derives
- // from a generic method
- newsym := makeInstName(oldfn.Nname.Sym(), subst.targs, subst.isMethod)
- newfn.Nname = ir.NewNameAt(oldfn.Nname.Pos(), newsym)
- newfn.Nname.Func = newfn
- newfn.Nname.Defn = newfn
- ir.MarkFunc(newfn.Nname)
- newfn.OClosure = m.(*ir.ClosureExpr)
+ newfn := ir.NewClosureFunc(oldfn.Pos(), subst.newf != nil)
+ ir.NameClosure(newfn.OClosure, subst.newf)
saveNewf := subst.newf
ir.CurFunc = newfn
subst.newf = newfn
newfn.Dcl = subst.namelist(oldfn.Dcl)
- newfn.ClosureVars = subst.namelist(oldfn.ClosureVars)
- typed(subst.typ(oldfn.Nname.Type()), newfn.Nname)
- typed(newfn.Nname.Type(), m)
+ // Make a closure variable for the dictionary of the
+ // containing function.
+ cdict := ir.CaptureName(oldfn.Pos(), newfn, subst.info.dictParam)
+ typed(types.Types[types.TUINTPTR], cdict)
+ ir.FinishCaptureNames(oldfn.Pos(), saveNewf, newfn)
+ newfn.ClosureVars = append(newfn.ClosureVars, subst.namelist(oldfn.ClosureVars)...)
+
+ // Copy that closure variable to a local one.
+ // Note: this allows the dictionary to be captured by child closures.
+ // See issue 47723.
+ ldict := ir.NewNameAt(x.Pos(), subst.info.gf.Sym().Pkg.Lookup(".dict"))
+ typed(types.Types[types.TUINTPTR], ldict)
+ ldict.Class = ir.PAUTO
+ ldict.Curfn = newfn
+ newfn.Dcl = append(newfn.Dcl, ldict)
+ as := ir.NewAssignStmt(x.Pos(), ldict, cdict)
+ as.SetTypecheck(1)
+ newfn.Body.Append(as)
+
+ // Create inst info for the instantiated closure. The dict
+ // param is the closure variable for the dictionary of the
+ // outer function. Since the dictionary is shared, use the
+ // same entries for startSubDict, dictLen, dictEntryMap.
+ cinfo := &instInfo{
+ fun: newfn,
+ dictParam: ldict,
+ gf: subst.info.gf,
+ gfInfo: subst.info.gfInfo,
+ startSubDict: subst.info.startSubDict,
+ startItabConv: subst.info.startItabConv,
+ dictLen: subst.info.dictLen,
+ dictEntryMap: subst.info.dictEntryMap,
+ }
+ subst.g.instInfoMap[newfn.Nname.Sym()] = cinfo
+
+ typed(subst.ts.Typ(oldfn.Nname.Type()), newfn.Nname)
+ typed(newfn.Nname.Type(), newfn.OClosure)
newfn.SetTypecheck(1)
+ outerinfo := subst.info
+ subst.info = cinfo
// Make sure type of closure function is set before doing body.
- newfn.Body = subst.list(oldfn.Body)
+ newfn.Body.Append(subst.list(oldfn.Body)...)
+ subst.info = outerinfo
subst.newf = saveNewf
ir.CurFunc = saveNewf
- subst.g.target.Decls = append(subst.g.target.Decls, newfn)
+ m = ir.UseClosure(newfn.OClosure, subst.g.target)
+ m.(*ir.ClosureExpr).SetInit(subst.list(x.Init()))
+
+ case ir.OCONVIFACE:
+ x := x.(*ir.ConvExpr)
+ // Note: x's argument is still typed as a type parameter.
+ // m's argument now has an instantiated type.
+ if x.X.Type().HasTParam() {
+ m = convertUsingDictionary(subst.info, subst.info.dictParam, m.Pos(), m.(*ir.ConvExpr).X, x, m.Type(), x.X.Type())
+ }
+ case ir.ODOTTYPE, ir.ODOTTYPE2:
+ if !x.Type().HasTParam() {
+ break
+ }
+ dt := m.(*ir.TypeAssertExpr)
+ var rt ir.Node
+ if dt.Type().IsInterface() || dt.X.Type().IsEmptyInterface() {
+ ix := findDictType(subst.info, x.Type())
+ assert(ix >= 0)
+ rt = getDictionaryType(subst.info, subst.info.dictParam, dt.Pos(), ix)
+ } else {
+ // nonempty interface to noninterface. Need an itab.
+ ix := -1
+ for i, ic := range subst.info.gfInfo.itabConvs {
+ if ic == x {
+ ix = subst.info.startItabConv + i
+ break
+ }
+ }
+ assert(ix >= 0)
+ rt = getDictionaryEntry(dt.Pos(), subst.info.dictParam, ix, subst.info.dictLen)
+ }
+ op := ir.ODYNAMICDOTTYPE
+ if x.Op() == ir.ODOTTYPE2 {
+ op = ir.ODYNAMICDOTTYPE2
+ }
+ m = ir.NewDynamicTypeAssertExpr(dt.Pos(), op, dt.X, rt)
+ m.SetType(dt.Type())
+ m.SetTypecheck(1)
+ case ir.OCASE:
+ if _, ok := x.(*ir.CommClause); ok {
+ // This is not a type switch. TODO: Should we use an OSWITCH case here instead of OCASE?
+ break
+ }
+ x := x.(*ir.CaseClause)
+ m := m.(*ir.CaseClause)
+ for i, c := range x.List {
+ if c.Op() == ir.OTYPE && c.Type().HasTParam() {
+ // Use a *runtime._type for the dynamic type.
+ ix := findDictType(subst.info, c.Type())
+ assert(ix >= 0)
+ dt := ir.NewDynamicType(c.Pos(), getDictionaryEntry(c.Pos(), subst.info.dictParam, ix, subst.info.dictLen))
+
+ // For type switch from nonempty interfaces to non-interfaces, we need an itab as well.
+ if !m.List[i].Type().IsInterface() {
+ if _, ok := subst.info.gfInfo.type2switchType[c]; ok {
+ // Type switch from nonempty interface. We need a *runtime.itab
+ // for the dynamic type.
+ ix := -1
+ for i, ic := range subst.info.gfInfo.itabConvs {
+ if ic == c {
+ ix = subst.info.startItabConv + i
+ break
+ }
+ }
+ assert(ix >= 0)
+ dt.ITab = getDictionaryEntry(c.Pos(), subst.info.dictParam, ix, subst.info.dictLen)
+ }
+ }
+ typed(m.List[i].Type(), dt)
+ m.List[i] = dt
+ }
+ }
}
return m
}
@@ -548,10 +1207,78 @@ func (subst *subster) node(n ir.Node) ir.Node {
return edit(n)
}
+// findDictType looks for type t in the typeparams or derived types in the generic
+// function info.gfInfo. This will indicate the dictionary entry with the
+// correct concrete type for the associated instantiated function.
+func findDictType(info *instInfo, t *types.Type) int {
+ for i, dt := range info.gfInfo.tparams {
+ if dt == t {
+ return i
+ }
+ }
+ for i, dt := range info.gfInfo.derivedTypes {
+ if types.Identical(dt, t) {
+ return i + len(info.gfInfo.tparams)
+ }
+ }
+ return -1
+}
+
+// convertUsingDictionary converts value v from instantiated type src to an interface
+// type dst, by returning a new set of nodes that make use of a dictionary entry. src
+// is the generic (not shape) type, and gn is the original generic node of the
+// CONVIFACE node or XDOT node (for a bound method call) that is causing the
+// conversion.
+func convertUsingDictionary(info *instInfo, dictParam *ir.Name, pos src.XPos, v ir.Node, gn ir.Node, dst, src *types.Type) ir.Node {
+ assert(src.HasTParam())
+ assert(dst.IsInterface())
+
+ var rt ir.Node
+ if !dst.IsEmptyInterface() {
+ // We should have an itab entry in the dictionary. Using this itab
+ // will be more efficient than converting to an empty interface first
+ // and then type asserting to dst.
+ ix := -1
+ for i, ic := range info.gfInfo.itabConvs {
+ if ic == gn {
+ ix = info.startItabConv + i
+ break
+ }
+ }
+ assert(ix >= 0)
+ rt = getDictionaryEntry(pos, dictParam, ix, info.dictLen)
+ } else if v.Type().IsInterface() {
+ ta := ir.NewTypeAssertExpr(pos, v, nil)
+ ta.SetType(dst)
+ ta.SetTypecheck(1)
+ return ta
+ } else {
+ ix := findDictType(info, src)
+ assert(ix >= 0)
+ // Load the actual runtime._type of the type parameter from the dictionary.
+ rt = getDictionaryType(info, dictParam, pos, ix)
+ }
+
+ // Figure out what the data field of the interface will be.
+ var data ir.Node
+ if v.Type().IsInterface() {
+ data = ir.NewUnaryExpr(pos, ir.OIDATA, v)
+ } else {
+ data = ir.NewConvExpr(pos, ir.OCONVIDATA, nil, v)
+ }
+ typed(types.Types[types.TUNSAFEPTR], data)
+
+ // Build an interface from the type and data parts.
+ var i ir.Node = ir.NewBinaryExpr(pos, ir.OEFACE, rt, data)
+ typed(dst, i)
+ return i
+
+}
+
func (subst *subster) namelist(l []*ir.Name) []*ir.Name {
s := make([]*ir.Name, len(l))
for i, n := range l {
- s[i] = subst.node(n).(*ir.Name)
+ s[i] = subst.localvar(n)
if n.Defn != nil {
s[i].Defn = subst.node(n.Defn)
}
@@ -570,348 +1297,706 @@ func (subst *subster) list(l []ir.Node) []ir.Node {
return s
}
-// tstruct substitutes type params in types of the fields of a structure type. For
-// each field, if Nname is set, tstruct also translates the Nname using
-// subst.vars, if Nname is in subst.vars. To always force the creation of a new
-// (top-level) struct, regardless of whether anything changed with the types or
-// names of the struct's fields, set force to true.
-func (subst *subster) tstruct(t *types.Type, force bool) *types.Type {
- if t.NumFields() == 0 {
- if t.HasTParam() {
- // For an empty struct, we need to return a new type,
- // since it may now be fully instantiated (HasTParam
- // becomes false).
- return types.NewStruct(t.Pkg(), nil)
- }
- return t
- }
- var newfields []*types.Field
- if force {
- newfields = make([]*types.Field, t.NumFields())
- }
- for i, f := range t.Fields().Slice() {
- t2 := subst.typ(f.Type)
- if (t2 != f.Type || f.Nname != nil) && newfields == nil {
- newfields = make([]*types.Field, t.NumFields())
- for j := 0; j < i; j++ {
- newfields[j] = t.Field(j)
- }
- }
- if newfields != nil {
- // TODO(danscales): make sure this works for the field
- // names of embedded types (which should keep the name of
- // the type param, not the instantiated type).
- newfields[i] = types.NewField(f.Pos, f.Sym, t2)
- if f.Nname != nil {
- // f.Nname may not be in subst.vars[] if this is
- // a function name or a function instantiation type
- // that we are translating
- v := subst.vars[f.Nname.(*ir.Name)]
- // Be careful not to put a nil var into Nname,
- // since Nname is an interface, so it would be a
- // non-nil interface.
- if v != nil {
- newfields[i].Nname = v
- }
- }
+// fields sets the Nname field for the Field nodes inside a type signature, based
+// on the corresponding in/out parameters in dcl. It depends on the in and out
+// parameters being in order in dcl.
+func (subst *subster) fields(class ir.Class, oldfields []*types.Field, dcl []*ir.Name) []*types.Field {
+ // Find the starting index in dcl of declarations of the class (either
+ // PPARAM or PPARAMOUT).
+ var i int
+ for i = range dcl {
+ if dcl[i].Class == class {
+ break
}
}
- if newfields != nil {
- return types.NewStruct(t.Pkg(), newfields)
+
+ // Create newfields nodes that are copies of the oldfields nodes, but
+ // with substitution for any type params, and with Nname set to be the node in
+ // Dcl for the corresponding PPARAM or PPARAMOUT.
+ newfields := make([]*types.Field, len(oldfields))
+ for j := range oldfields {
+ newfields[j] = oldfields[j].Copy()
+ newfields[j].Type = subst.ts.Typ(oldfields[j].Type)
+ // A PPARAM field will be missing from dcl if its name is
+ // unspecified or specified as "_". So, we compare the dcl sym
+ // with the field sym (or sym of the field's Nname node). (Unnamed
+ // results still have a name like ~r2 in their Nname node.) If
+ // they don't match, this dcl (if there is one left) must apply to
+ // a later field.
+ if i < len(dcl) && (dcl[i].Sym() == oldfields[j].Sym ||
+ (oldfields[j].Nname != nil && dcl[i].Sym() == oldfields[j].Nname.Sym())) {
+ newfields[j].Nname = dcl[i]
+ i++
+ }
+ }
+ return newfields
+}
+
+// deref does a single deref of type t, if it is a pointer type.
+func deref(t *types.Type) *types.Type {
+ if t.IsPtr() {
+ return t.Elem()
}
return t
+}
+// markTypeUsed marks type t as used in order to help avoid dead-code elimination of
+// needed methods.
+func markTypeUsed(t *types.Type, lsym *obj.LSym) {
+ if t.IsInterface() {
+ // Mark all the methods of the interface as used.
+ // TODO: we should really only mark the interface methods
+ // that are actually called in the application.
+ for i := range t.AllMethods().Slice() {
+ reflectdata.MarkUsedIfaceMethodIndex(lsym, t, i)
+ }
+ } else {
+ // TODO: This is somewhat overkill, we really only need it
+ // for types that are put into interfaces.
+ reflectdata.MarkTypeUsedInInterface(t, lsym)
+ }
}
-// tinter substitutes type params in types of the methods of an interface type.
-func (subst *subster) tinter(t *types.Type) *types.Type {
- if t.Methods().Len() == 0 {
- return t
+// getDictionarySym returns the dictionary for the named generic function gf, which
+// is instantiated with the type arguments targs.
+func (g *irgen) getDictionarySym(gf *ir.Name, targs []*types.Type, isMeth bool) *types.Sym {
+ if len(targs) == 0 {
+ base.Fatalf("%s should have type arguments", gf.Sym().Name)
+ }
+
+ // Enforce that only concrete types can make it to here.
+ for _, t := range targs {
+ if t.HasShape() {
+ panic(fmt.Sprintf("shape %+v in dictionary for %s", t, gf.Sym().Name))
+ }
+ }
+
+ // Get a symbol representing the dictionary.
+ sym := typecheck.MakeDictSym(gf.Sym(), targs, isMeth)
+
+ // Initialize the dictionary, if we haven't yet already.
+ lsym := sym.Linksym()
+ if len(lsym.P) > 0 {
+ // We already started creating this dictionary and its lsym.
+ return sym
+ }
+
+ info := g.getGfInfo(gf)
+
+ infoPrint("=== Creating dictionary %v\n", sym.Name)
+ off := 0
+ // Emit an entry for each targ (concrete type or gcshape).
+ for _, t := range targs {
+ infoPrint(" * %v\n", t)
+ s := reflectdata.TypeLinksym(t)
+ off = objw.SymPtr(lsym, off, s, 0)
+ markTypeUsed(t, lsym)
+ }
+ subst := typecheck.Tsubster{
+ Tparams: info.tparams,
+ Targs: targs,
}
- var newfields []*types.Field
- for i, f := range t.Methods().Slice() {
- t2 := subst.typ(f.Type)
- if (t2 != f.Type || f.Nname != nil) && newfields == nil {
- newfields = make([]*types.Field, t.Methods().Len())
- for j := 0; j < i; j++ {
- newfields[j] = t.Methods().Index(j)
+ // Emit an entry for each derived type (after substituting targs)
+ for _, t := range info.derivedTypes {
+ ts := subst.Typ(t)
+ infoPrint(" - %v\n", ts)
+ s := reflectdata.TypeLinksym(ts)
+ off = objw.SymPtr(lsym, off, s, 0)
+ markTypeUsed(ts, lsym)
+ }
+ // Emit an entry for each subdictionary (after substituting targs)
+ for _, n := range info.subDictCalls {
+ var sym *types.Sym
+ switch n.Op() {
+ case ir.OCALL:
+ call := n.(*ir.CallExpr)
+ if call.X.Op() == ir.OXDOT {
+ var nameNode *ir.Name
+ se := call.X.(*ir.SelectorExpr)
+ if types.IsInterfaceMethod(se.Selection.Type) {
+ // This is a method call enabled by a type bound.
+ tmpse := ir.NewSelectorExpr(base.Pos, ir.OXDOT, se.X, se.Sel)
+ tmpse = typecheck.AddImplicitDots(tmpse)
+ tparam := tmpse.X.Type()
+ assert(tparam.IsTypeParam())
+ recvType := targs[tparam.Index()]
+ if recvType.IsInterface() || len(recvType.RParams()) == 0 {
+ // No sub-dictionary entry is
+ // actually needed, since the
+ // type arg is not an
+ // instantiated type that
+ // will have generic methods.
+ break
+ }
+ // This is a method call for an
+ // instantiated type, so we need a
+ // sub-dictionary.
+ targs := recvType.RParams()
+ genRecvType := recvType.OrigSym.Def.Type()
+ nameNode = typecheck.Lookdot1(call.X, se.Sel, genRecvType, genRecvType.Methods(), 1).Nname.(*ir.Name)
+ sym = g.getDictionarySym(nameNode, targs, true)
+ } else {
+ // This is the case of a normal
+ // method call on a generic type.
+ nameNode = call.X.(*ir.SelectorExpr).Selection.Nname.(*ir.Name)
+ subtargs := deref(call.X.(*ir.SelectorExpr).X.Type()).RParams()
+ s2targs := make([]*types.Type, len(subtargs))
+ for i, t := range subtargs {
+ s2targs[i] = subst.Typ(t)
+ }
+ sym = g.getDictionarySym(nameNode, s2targs, true)
+ }
+ } else {
+ inst := call.X.(*ir.InstExpr)
+ var nameNode *ir.Name
+ var meth *ir.SelectorExpr
+ var isMeth bool
+ if meth, isMeth = inst.X.(*ir.SelectorExpr); isMeth {
+ nameNode = meth.Selection.Nname.(*ir.Name)
+ } else {
+ nameNode = inst.X.(*ir.Name)
+ }
+ subtargs := typecheck.TypesOf(inst.Targs)
+ for i, t := range subtargs {
+ subtargs[i] = subst.Typ(t)
+ }
+ sym = g.getDictionarySym(nameNode, subtargs, isMeth)
}
+
+ case ir.OFUNCINST:
+ inst := n.(*ir.InstExpr)
+ nameNode := inst.X.(*ir.Name)
+ subtargs := typecheck.TypesOf(inst.Targs)
+ for i, t := range subtargs {
+ subtargs[i] = subst.Typ(t)
+ }
+ sym = g.getDictionarySym(nameNode, subtargs, false)
+
+ case ir.OXDOT:
+ selExpr := n.(*ir.SelectorExpr)
+ subtargs := deref(selExpr.X.Type()).RParams()
+ s2targs := make([]*types.Type, len(subtargs))
+ for i, t := range subtargs {
+ s2targs[i] = subst.Typ(t)
+ }
+ nameNode := selExpr.Selection.Nname.(*ir.Name)
+ sym = g.getDictionarySym(nameNode, s2targs, true)
+
+ default:
+ assert(false)
}
- if newfields != nil {
- newfields[i] = types.NewField(f.Pos, f.Sym, t2)
+
+ if sym == nil {
+ // Unused sub-dictionary entry, just emit 0.
+ off = objw.Uintptr(lsym, off, 0)
+ infoPrint(" - Unused subdict entry\n")
+ } else {
+ off = objw.SymPtr(lsym, off, sym.Linksym(), 0)
+ infoPrint(" - Subdict %v\n", sym.Name)
}
}
- if newfields != nil {
- return types.NewInterface(t.Pkg(), newfields)
+
+ delay := &delayInfo{
+ gf: gf,
+ targs: targs,
+ sym: sym,
+ off: off,
}
- return t
+ g.dictSymsToFinalize = append(g.dictSymsToFinalize, delay)
+ g.instTypeList = append(g.instTypeList, subst.InstTypeList...)
+ return sym
}
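
Putting the emission order together, a finished dictionary is a read-only array of pointer-sized slots: type args first, then derived types, then sub-dictionaries, then itabs. The model below is purely illustrative (the entries are strings standing in for linker symbols, and the names are made up):

package main

import "fmt"

func main() {
	dict := []string{
		"type.int",           // targ entry
		"type.[]int",         // derived type ([]T with T=int)
		"dict.G[int]",        // sub-dictionary for a generic call inside the function
		"itab(int,Stringer)", // itab entry (written as 0 when unused)
	}
	for i, e := range dict {
		fmt.Println(i, e)
	}
}
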
-// instTypeName creates a name for an instantiated type, based on the name of the
-// generic type and the type args
-func instTypeName(name string, targs []*types.Type) string {
- b := bytes.NewBufferString(name)
- b.WriteByte('[')
- for i, targ := range targs {
- if i > 0 {
- b.WriteByte(',')
- }
- b.WriteString(targ.String())
- }
- b.WriteByte(']')
- return b.String()
-}
-
-// typ computes the type obtained by substituting any type parameter in t with the
-// corresponding type argument in subst. If t contains no type parameters, the
-// result is t; otherwise the result is a new type. It deals with recursive types
-// by using TFORW types and finding partially or fully created types via sym.Def.
-func (subst *subster) typ(t *types.Type) *types.Type {
- if !t.HasTParam() && t.Kind() != types.TFUNC {
- // Note: function types need to be copied regardless, as the
- // types of closures may contain declarations that need
- // to be copied. See #45738.
- return t
- }
-
- if t.Kind() == types.TTYPEPARAM {
- for i, tp := range subst.tparams {
- if tp.Type == t {
- return subst.targs[i].Type()
- }
- }
- // If t is a simple typeparam T, then t has the name/symbol 'T'
- // and t.Underlying() == t.
- //
- // However, consider the type definition: 'type P[T any] T'. We
- // might use this definition so we can have a variant of type T
- // that we can add new methods to. Suppose t is a reference to
- // P[T]. t has the name 'P[T]', but its kind is TTYPEPARAM,
- // because P[T] is defined as T. If we look at t.Underlying(), it
- // is different, because the name of t.Underlying() is 'T' rather
- // than 'P[T]'. But the kind of t.Underlying() is also TTYPEPARAM.
- // In this case, we do the needed recursive substitution in the
- // case statement below.
- if t.Underlying() == t {
- // t is a simple typeparam that didn't match anything in tparam
- return t
- }
- // t is a more complex typeparam (e.g. P[T], as above, whose
- // definition is just T).
- assert(t.Sym() != nil)
- }
-
- var newsym *types.Sym
- var neededTargs []*types.Type
- var forw *types.Type
-
- if t.Sym() != nil {
- // Translate the type params for this type according to
- // the tparam/targs mapping from subst.
- neededTargs = make([]*types.Type, len(t.RParams()))
- for i, rparam := range t.RParams() {
- neededTargs[i] = subst.typ(rparam)
- }
- // For a named (defined) type, we have to change the name of the
- // type as well. We do this first, so we can look up if we've
- // already seen this type during this substitution or other
- // definitions/substitutions.
- genName := genericTypeName(t.Sym())
- newsym = t.Sym().Pkg.Lookup(instTypeName(genName, neededTargs))
- if newsym.Def != nil {
- // We've already created this instantiated defined type.
- return newsym.Def.Type()
- }
-
- // In order to deal with recursive generic types, create a TFORW
- // type initially and set the Def field of its sym, so it can be
- // found if this type appears recursively within the type.
- forw = newIncompleteNamedType(t.Pos(), newsym)
- //println("Creating new type by sub", newsym.Name, forw.HasTParam())
- forw.SetRParams(neededTargs)
- }
-
- var newt *types.Type
+// finalizeSyms finishes up all dictionaries on g.dictSymsToFinalize, by writing out
+// any needed LSyms for itabs. The itab lsyms create wrappers which need various
+// dictionaries and method instantiations to be complete, so, to avoid recursive
+// dependencies, we finalize the itab lsyms only after all dictionary syms and
+// instantiations have been created.
+func (g *irgen) finalizeSyms() {
+ for _, d := range g.dictSymsToFinalize {
+ infoPrint("=== Finalizing dictionary %s\n", d.sym.Name)
+
+ lsym := d.sym.Linksym()
+ info := g.getGfInfo(d.gf)
+
+ subst := typecheck.Tsubster{
+ Tparams: info.tparams,
+ Targs: d.targs,
+ }
- switch t.Kind() {
- case types.TTYPEPARAM:
- if t.Sym() == newsym {
- // The substitution did not change the type.
- return t
+ // Emit an entry for each itab
+ for _, n := range info.itabConvs {
+ var srctype, dsttype *types.Type
+ switch n.Op() {
+ case ir.OXDOT:
+ se := n.(*ir.SelectorExpr)
+ srctype = subst.Typ(se.X.Type())
+ dsttype = subst.Typ(se.X.Type().Bound())
+ found := false
+ for i, m := range dsttype.AllMethods().Slice() {
+ if se.Sel == m.Sym {
+ // Mark that this method se.Sel is
+ // used for the dsttype interface, so
+ // it won't get deadcoded.
+ reflectdata.MarkUsedIfaceMethodIndex(lsym, dsttype, i)
+ found = true
+ break
+ }
+ }
+ assert(found)
+ case ir.ODOTTYPE, ir.ODOTTYPE2:
+ srctype = subst.Typ(n.(*ir.TypeAssertExpr).Type())
+ dsttype = subst.Typ(n.(*ir.TypeAssertExpr).X.Type())
+ case ir.OCONVIFACE:
+ srctype = subst.Typ(n.(*ir.ConvExpr).X.Type())
+ dsttype = subst.Typ(n.Type())
+ case ir.OTYPE:
+ srctype = subst.Typ(n.Type())
+ dsttype = subst.Typ(info.type2switchType[n])
+ default:
+ base.Fatalf("itab entry with unknown op %s", n.Op())
+ }
+ if srctype.IsInterface() {
+ // No itab is wanted if src type is an interface. We
+ // will use a type assert instead.
+ d.off = objw.Uintptr(lsym, d.off, 0)
+ infoPrint(" + Unused itab entry for %v\n", srctype)
+ } else {
+ itabLsym := reflectdata.ITabLsym(srctype, dsttype)
+ d.off = objw.SymPtr(lsym, d.off, itabLsym, 0)
+ infoPrint(" + Itab for (%v,%v)\n", srctype, dsttype)
+ }
}
- // Substitute the underlying typeparam (e.g. T in P[T], see
- // the example describing type P[T] above).
- newt = subst.typ(t.Underlying())
- assert(newt != t)
- case types.TARRAY:
- elem := t.Elem()
- newelem := subst.typ(elem)
- if newelem != elem {
- newt = types.NewArray(newelem, t.NumElem())
+ objw.Global(lsym, int32(d.off), obj.DUPOK|obj.RODATA)
+ infoPrint("=== Finalized dictionary %s\n", d.sym.Name)
+
+ g.instTypeList = append(g.instTypeList, subst.InstTypeList...)
+ }
+ g.dictSymsToFinalize = nil
+}
+
+func (g *irgen) getDictionaryValue(gf *ir.Name, targs []*types.Type, isMeth bool) ir.Node {
+ sym := g.getDictionarySym(gf, targs, isMeth)
+
+ // Make (or reuse) a node referencing the dictionary symbol.
+ var n *ir.Name
+ if sym.Def != nil {
+ n = sym.Def.(*ir.Name)
+ } else {
+ n = typecheck.NewName(sym)
+ n.SetType(types.Types[types.TUINTPTR]) // should probably be [...]uintptr, but doesn't really matter
+ n.SetTypecheck(1)
+ n.Class = ir.PEXTERN
+ sym.Def = n
+ }
+
+ // Return the address of the dictionary.
+ np := typecheck.NodAddr(n)
+ // Note: treat dictionary pointers as uintptrs, so they aren't pointers
+ // with respect to GC. That saves on stack scanning work, write barriers, etc.
+ // We can get away with it because dictionaries are global variables.
+ // TODO: use a cast, or is typing directly ok?
+ np.SetType(types.Types[types.TUINTPTR])
+ np.SetTypecheck(1)
+ return np
+}
+
+// hasTParamNodes returns true if the type of any node in targs has a typeparam.
+func hasTParamNodes(targs []ir.Node) bool {
+ for _, n := range targs {
+ if n.Type().HasTParam() {
+ return true
}
+ }
+ return false
+}
+
+// hasTParamTypes returns true if any type in targs has a typeparam.
+func hasTParamTypes(targs []*types.Type) bool {
+ for _, t := range targs {
+ if t.HasTParam() {
+ return true
+ }
+ }
+ return false
+}
- case types.TPTR:
- elem := t.Elem()
- newelem := subst.typ(elem)
- if newelem != elem {
- newt = types.NewPtr(newelem)
+// getGfInfo gets information for a generic function - type params, derived generic
+// types, and subdictionaries.
+func (g *irgen) getGfInfo(gn *ir.Name) *gfInfo {
+ infop := g.gfInfoMap[gn.Sym()]
+ if infop != nil {
+ return infop
+ }
+
+ checkFetchBody(gn)
+ var info gfInfo
+ gf := gn.Func
+ recv := gf.Type().Recv()
+ if recv != nil {
+ info.tparams = deref(recv.Type).RParams()
+ } else {
+ tparams := gn.Type().TParams().FieldSlice()
+ info.tparams = make([]*types.Type, len(tparams))
+ for i, f := range tparams {
+ info.tparams[i] = f.Type
}
+ }
- case types.TSLICE:
- elem := t.Elem()
- newelem := subst.typ(elem)
- if newelem != elem {
- newt = types.NewSlice(newelem)
+ for _, t := range info.tparams {
+ b := t.Bound()
+ if b.HasTParam() {
+ // If a type bound is parameterized (unusual case), then we
+ // may need its derived type to do a type assert when doing a
+ // bound call for a type arg that is an interface.
+ addType(&info, nil, b)
}
+ }
- case types.TSTRUCT:
- newt = subst.tstruct(t, false)
- if newt == t {
- newt = nil
+ for _, n := range gf.Dcl {
+ addType(&info, n, n.Type())
+ }
+
+ if infoPrintMode {
+ fmt.Printf(">>> GfInfo for %v\n", gn)
+ for _, t := range info.tparams {
+ fmt.Printf(" Typeparam %v\n", t)
}
+ }
- case types.TFUNC:
- newrecvs := subst.tstruct(t.Recvs(), false)
- newparams := subst.tstruct(t.Params(), false)
- newresults := subst.tstruct(t.Results(), false)
- if newrecvs != t.Recvs() || newparams != t.Params() || newresults != t.Results() {
- // If any types have changed, then the all the fields of
- // of recv, params, and results must be copied, because they have
- // offset fields that are dependent, and so must have an
- // independent copy for each new signature.
- var newrecv *types.Field
- if newrecvs.NumFields() > 0 {
- if newrecvs == t.Recvs() {
- newrecvs = subst.tstruct(t.Recvs(), true)
+ var visitFunc func(ir.Node)
+ visitFunc = func(n ir.Node) {
+ if n.Op() == ir.OFUNCINST && !n.(*ir.InstExpr).Implicit() {
+ if hasTParamNodes(n.(*ir.InstExpr).Targs) {
+ infoPrint(" Closure&subdictionary required at generic function value %v\n", n.(*ir.InstExpr).X)
+ info.subDictCalls = append(info.subDictCalls, n)
+ }
+ } else if n.Op() == ir.OXDOT && !n.(*ir.SelectorExpr).Implicit() &&
+ n.(*ir.SelectorExpr).Selection != nil &&
+ len(deref(n.(*ir.SelectorExpr).X.Type()).RParams()) > 0 {
+ if hasTParamTypes(deref(n.(*ir.SelectorExpr).X.Type()).RParams()) {
+ if n.(*ir.SelectorExpr).X.Op() == ir.OTYPE {
+ infoPrint(" Closure&subdictionary required at generic meth expr %v\n", n)
+ } else {
+ infoPrint(" Closure&subdictionary required at generic meth value %v\n", n)
}
- newrecv = newrecvs.Field(0)
+ info.subDictCalls = append(info.subDictCalls, n)
}
- if newparams == t.Params() {
- newparams = subst.tstruct(t.Params(), true)
+ }
+ if n.Op() == ir.OCALL && n.(*ir.CallExpr).X.Op() == ir.OFUNCINST {
+ n.(*ir.CallExpr).X.(*ir.InstExpr).SetImplicit(true)
+ if hasTParamNodes(n.(*ir.CallExpr).X.(*ir.InstExpr).Targs) {
+ infoPrint(" Subdictionary at generic function/method call: %v - %v\n", n.(*ir.CallExpr).X.(*ir.InstExpr).X, n)
+ info.subDictCalls = append(info.subDictCalls, n)
}
- if newresults == t.Results() {
- newresults = subst.tstruct(t.Results(), true)
+ }
+ if n.Op() == ir.OCALL && n.(*ir.CallExpr).X.Op() == ir.OXDOT &&
+ n.(*ir.CallExpr).X.(*ir.SelectorExpr).Selection != nil &&
+ len(deref(n.(*ir.CallExpr).X.(*ir.SelectorExpr).X.Type()).RParams()) > 0 {
+ n.(*ir.CallExpr).X.(*ir.SelectorExpr).SetImplicit(true)
+ if hasTParamTypes(deref(n.(*ir.CallExpr).X.(*ir.SelectorExpr).X.Type()).RParams()) {
+ infoPrint(" Subdictionary at generic method call: %v\n", n)
+ info.subDictCalls = append(info.subDictCalls, n)
}
- newt = types.NewSignature(t.Pkg(), newrecv, t.TParams().FieldSlice(), newparams.FieldSlice(), newresults.FieldSlice())
}
-
- case types.TINTER:
- newt = subst.tinter(t)
- if newt == t {
- newt = nil
+ if n.Op() == ir.OCALL && n.(*ir.CallExpr).X.Op() == ir.OXDOT &&
+ n.(*ir.CallExpr).X.(*ir.SelectorExpr).Selection != nil &&
+ deref(n.(*ir.CallExpr).X.(*ir.SelectorExpr).X.Type()).IsTypeParam() {
+ n.(*ir.CallExpr).X.(*ir.SelectorExpr).SetImplicit(true)
+ infoPrint(" Optional subdictionary at generic bound call: %v\n", n)
+ info.subDictCalls = append(info.subDictCalls, n)
}
-
- case types.TMAP:
- newkey := subst.typ(t.Key())
- newval := subst.typ(t.Elem())
- if newkey != t.Key() || newval != t.Elem() {
- newt = types.NewMap(newkey, newval)
- }
-
- case types.TCHAN:
- elem := t.Elem()
- newelem := subst.typ(elem)
- if newelem != elem {
- newt = types.NewChan(newelem, t.ChanDir())
- if !newt.HasTParam() {
- // TODO(danscales): not sure why I have to do this
- // only for channels.....
- types.CheckSize(newt)
- }
- }
- }
- if newt == nil {
- // Even though there were typeparams in the type, there may be no
- // change if this is a function type for a function call (which will
- // have its own tparams/targs in the function instantiation).
- return t
- }
-
- if t.Sym() == nil {
- // Not a named type, so there was no forwarding type and there are
- // no methods to substitute.
- assert(t.Methods().Len() == 0)
- return newt
- }
-
- forw.SetUnderlying(newt)
- newt = forw
-
- if t.Kind() != types.TINTER && t.Methods().Len() > 0 {
- // Fill in the method info for the new type.
- var newfields []*types.Field
- newfields = make([]*types.Field, t.Methods().Len())
- for i, f := range t.Methods().Slice() {
- t2 := subst.typ(f.Type)
- oldsym := f.Nname.Sym()
- newsym := makeInstName(oldsym, subst.targs, true)
- var nname *ir.Name
- if newsym.Def != nil {
- nname = newsym.Def.(*ir.Name)
- } else {
- nname = ir.NewNameAt(f.Pos, newsym)
- nname.SetType(t2)
- newsym.Def = nname
+ if n.Op() == ir.OCONVIFACE && n.Type().IsInterface() &&
+ !n.Type().IsEmptyInterface() &&
+ n.(*ir.ConvExpr).X.Type().HasTParam() {
+ infoPrint(" Itab for interface conv: %v\n", n)
+ info.itabConvs = append(info.itabConvs, n)
+ }
+ if n.Op() == ir.OXDOT && n.(*ir.SelectorExpr).X.Type().IsTypeParam() {
+ infoPrint(" Itab for bound call: %v\n", n)
+ info.itabConvs = append(info.itabConvs, n)
+ }
+ if (n.Op() == ir.ODOTTYPE || n.Op() == ir.ODOTTYPE2) && !n.(*ir.TypeAssertExpr).Type().IsInterface() && !n.(*ir.TypeAssertExpr).X.Type().IsEmptyInterface() {
+ infoPrint(" Itab for dot type: %v\n", n)
+ info.itabConvs = append(info.itabConvs, n)
+ }
+ if n.Op() == ir.OCLOSURE {
+ // Visit the closure body and add all relevant entries to the
+ // dictionary of the outer function (closure will just use
+ // the dictionary of the outer function).
+ for _, n1 := range n.(*ir.ClosureExpr).Func.Body {
+ ir.Visit(n1, visitFunc)
}
- newfields[i] = types.NewField(f.Pos, f.Sym, t2)
- newfields[i].Nname = nname
}
- newt.Methods().Set(newfields)
- if !newt.HasTParam() {
- // Generate all the methods for a new fully-instantiated type.
- subst.g.instTypeList = append(subst.g.instTypeList, newt)
+ if n.Op() == ir.OSWITCH && n.(*ir.SwitchStmt).Tag != nil && n.(*ir.SwitchStmt).Tag.Op() == ir.OTYPESW && !n.(*ir.SwitchStmt).Tag.(*ir.TypeSwitchGuard).X.Type().IsEmptyInterface() {
+ for _, cc := range n.(*ir.SwitchStmt).Cases {
+ for _, c := range cc.List {
+ if c.Op() == ir.OTYPE && c.Type().HasTParam() {
+ // Type switch from a non-empty interface - might need an itab.
+ infoPrint(" Itab for type switch: %v\n", c)
+ info.itabConvs = append(info.itabConvs, c)
+ if info.type2switchType == nil {
+ info.type2switchType = map[ir.Node]*types.Type{}
+ }
+ info.type2switchType[c] = n.(*ir.SwitchStmt).Tag.(*ir.TypeSwitchGuard).X.Type()
+ }
+ }
+ }
+ }
+ addType(&info, n, n.Type())
+ }
+
+ for _, stmt := range gf.Body {
+ ir.Visit(stmt, visitFunc)
+ }
+ if infoPrintMode {
+ for _, t := range info.derivedTypes {
+ fmt.Printf(" Derived type %v\n", t)
}
+ fmt.Printf(">>> Done Gfinfo\n")
}
- return newt
+ g.gfInfoMap[gn.Sym()] = &info
+ return &info
}
-// fields sets the Nname field for the Field nodes inside a type signature, based
-// on the corresponding in/out parameters in dcl. It depends on the in and out
-// parameters being in order in dcl.
-func (subst *subster) fields(class ir.Class, oldfields []*types.Field, dcl []*ir.Name) []*types.Field {
- // Find the starting index in dcl of declarations of the class (either
- // PPARAM or PPARAMOUT).
- var i int
- for i = range dcl {
- if dcl[i].Class == class {
- break
+// addType adds t to info.derivedTypes if it is a parameterized type (which is not
+// just a simple type param) that is different from any existing type on
+// info.derivedTypes.
+func addType(info *gfInfo, n ir.Node, t *types.Type) {
+ if t == nil || !t.HasTParam() {
+ return
+ }
+ if t.IsTypeParam() && t.Underlying() == t {
+ return
+ }
+ if t.Kind() == types.TFUNC && n != nil &&
+ (t.Recv() != nil ||
+ n.Op() == ir.ONAME && n.Name().Class == ir.PFUNC) {
+ // Don't use the type of a named generic function or method,
+ // since that is parameterized by other typeparams.
+ // (They all come from arguments of a FUNCINST node.)
+ return
+ }
+ if doubleCheck && !parameterizedBy(t, info.tparams) {
+ base.Fatalf("adding type with invalid parameters %+v", t)
+ }
+ if t.Kind() == types.TSTRUCT && t.IsFuncArgStruct() {
+ // Multiple return values are not a relevant new type (?).
+ return
+ }
+ // Ignore a derived type we've already added.
+ for _, et := range info.derivedTypes {
+ if types.Identical(t, et) {
+ return
}
}
+ info.derivedTypes = append(info.derivedTypes, t)
+}
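
Concretely (an assumed example), for the generic function below the tparams list is {T}, and addType records the parameterized composite types []T and map[string]T as derived types; the bare type param T is skipped by the IsTypeParam check above, since the targ slots already cover it.

package main

// F is illustrative: its derived types are []T and map[string]T.
func F[T any](xs []T) map[string]T {
	m := make(map[string]T)
	for i, x := range xs {
		m[string(rune('a'+i))] = x
	}
	return m
}

func main() {
	println(len(F([]int{1, 2, 3}))) // 3
}
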
- // Create newfields nodes that are copies of the oldfields nodes, but
- // with substitution for any type params, and with Nname set to be the node in
- // Dcl for the corresponding PPARAM or PPARAMOUT.
- newfields := make([]*types.Field, len(oldfields))
- for j := range oldfields {
- newfields[j] = oldfields[j].Copy()
- newfields[j].Type = subst.typ(oldfields[j].Type)
- // A param field will be missing from dcl if its name is
- // unspecified or specified as "_". So, we compare the dcl sym
- // with the field sym. If they don't match, this dcl (if there is
- // one left) must apply to a later field.
- if i < len(dcl) && dcl[i].Sym() == oldfields[j].Sym {
- newfields[j].Nname = dcl[i]
- i++
+// parameterizedBy returns true if t is parameterized by (at most) params.
+func parameterizedBy(t *types.Type, params []*types.Type) bool {
+ return parameterizedBy1(t, params, map[*types.Type]bool{})
+}
+func parameterizedBy1(t *types.Type, params []*types.Type, visited map[*types.Type]bool) bool {
+ if visited[t] {
+ return true
+ }
+ visited[t] = true
+
+ if t.Sym() != nil && len(t.RParams()) > 0 {
+ // This defined type is instantiated. Check the instantiating types.
+ for _, r := range t.RParams() {
+ if !parameterizedBy1(r, params, visited) {
+ return false
+ }
}
+ return true
+ }
+ switch t.Kind() {
+ case types.TTYPEPARAM:
+ // Check if t is one of the allowed parameters in scope.
+ for _, p := range params {
+ if p == t {
+ return true
+ }
+ }
+ // Couldn't find t in the list of allowed parameters.
+ return false
+
+ case types.TARRAY, types.TPTR, types.TSLICE, types.TCHAN:
+ return parameterizedBy1(t.Elem(), params, visited)
+
+ case types.TMAP:
+ return parameterizedBy1(t.Key(), params, visited) && parameterizedBy1(t.Elem(), params, visited)
+
+ case types.TFUNC:
+ return parameterizedBy1(t.TParams(), params, visited) && parameterizedBy1(t.Recvs(), params, visited) && parameterizedBy1(t.Params(), params, visited) && parameterizedBy1(t.Results(), params, visited)
+
+ case types.TSTRUCT:
+ for _, f := range t.Fields().Slice() {
+ if !parameterizedBy1(f.Type, params, visited) {
+ return false
+ }
+ }
+ return true
+
+ case types.TINTER:
+ for _, f := range t.Methods().Slice() {
+ if !parameterizedBy1(f.Type, params, visited) {
+ return false
+ }
+ }
+ return true
+
+ case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64,
+ types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64,
+ types.TUINTPTR, types.TBOOL, types.TSTRING, types.TFLOAT32, types.TFLOAT64, types.TCOMPLEX64, types.TCOMPLEX128:
+ return true
+
+ case types.TUNION:
+ for i := 0; i < t.NumTerms(); i++ {
+ tt, _ := t.Term(i)
+ if !parameterizedBy1(tt, params, visited) {
+ return false
+ }
+ }
+ return true
+
+ default:
+ base.Fatalf("bad type kind %+v", t)
+ return true
}
- return newfields
}
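
As an aside for readers less familiar with this pattern: parameterizedBy1 is a cycle-safe structural walk over types, and the visited map is what keeps recursive generic types from looping forever. Below is a minimal standalone sketch of the same walk, using a hypothetical mini type representation instead of the compiler's *types.Type; the typ struct and allowedBy function are illustrative only and are not part of this patch.

package main

import "fmt"

// typ is an invented, minimal stand-in for the compiler's *types.Type.
type typ struct {
    name  string
    elem  []*typ // component types (element, key, field types, ...)
    param bool   // true if this node is a type parameter
}

// allowedBy reports whether t mentions only type parameters in params.
// The visited map keeps recursive types from looping forever, mirroring
// the visited map in parameterizedBy1 above.
func allowedBy(t *typ, params map[*typ]bool, visited map[*typ]bool) bool {
    if visited[t] {
        return true
    }
    visited[t] = true
    if t.param {
        return params[t]
    }
    for _, e := range t.elem {
        if !allowedBy(e, params, visited) {
            return false
        }
    }
    return true
}

func main() {
    T := &typ{name: "T", param: true}
    U := &typ{name: "U", param: true}
    listOfT := &typ{name: "[]T", elem: []*typ{T}}
    fmt.Println(allowedBy(listOfT, map[*typ]bool{T: true}, map[*typ]bool{})) // true
    fmt.Println(allowedBy(listOfT, map[*typ]bool{U: true}, map[*typ]bool{})) // false
}
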
-// defer does a single defer of type t, if it is a pointer type.
-func deref(t *types.Type) *types.Type {
- if t.IsPtr() {
- return t.Elem()
+// startClosure starts creation of a closure that has the function type typ. It
+// creates all the formal params and results according to the type typ. On return,
+// the body and closure variables of the closure must still be filled in, and
+// ir.UseClosure() called.
+func startClosure(pos src.XPos, outer *ir.Func, typ *types.Type) (*ir.Func, []*types.Field, []*types.Field) {
+ // Make a new internal function.
+ fn := ir.NewClosureFunc(pos, outer != nil)
+ ir.NameClosure(fn.OClosure, outer)
+
+ // Build formal argument and return lists.
+ var formalParams []*types.Field // arguments of closure
+ var formalResults []*types.Field // returns of closure
+ for i := 0; i < typ.NumParams(); i++ {
+ t := typ.Params().Field(i).Type
+ arg := ir.NewNameAt(pos, typecheck.LookupNum("a", i))
+ arg.Class = ir.PPARAM
+ typed(t, arg)
+ arg.Curfn = fn
+ fn.Dcl = append(fn.Dcl, arg)
+ f := types.NewField(pos, arg.Sym(), t)
+ f.Nname = arg
+ formalParams = append(formalParams, f)
}
- return t
+ for i := 0; i < typ.NumResults(); i++ {
+ t := typ.Results().Field(i).Type
+ result := ir.NewNameAt(pos, typecheck.LookupNum("r", i)) // TODO: names not needed?
+ result.Class = ir.PPARAMOUT
+ typed(t, result)
+ result.Curfn = fn
+ fn.Dcl = append(fn.Dcl, result)
+ f := types.NewField(pos, result.Sym(), t)
+ f.Nname = result
+ formalResults = append(formalResults, f)
+ }
+
+ // Build an internal function with the right signature.
+ closureType := types.NewSignature(typ.Pkg(), nil, nil, formalParams, formalResults)
+ typed(closureType, fn.Nname)
+ typed(typ, fn.OClosure)
+ fn.SetTypecheck(1)
+ return fn, formalParams, formalResults
+}
+
+// assertToBound returns a new node that converts a node rcvr with interface type to
+// the 'dst' interface type. bound is the unsubstituted form of dst.
+func assertToBound(info *instInfo, dictVar *ir.Name, pos src.XPos, rcvr ir.Node, bound, dst *types.Type) ir.Node {
+ if bound.HasTParam() {
+ ix := findDictType(info, bound)
+ assert(ix >= 0)
+ rt := getDictionaryType(info, dictVar, pos, ix)
+ rcvr = ir.NewDynamicTypeAssertExpr(pos, ir.ODYNAMICDOTTYPE, rcvr, rt)
+ typed(dst, rcvr)
+ } else {
+ rcvr = ir.NewTypeAssertExpr(pos, rcvr, nil)
+ typed(bound, rcvr)
+ }
+ return rcvr
}
-// newIncompleteNamedType returns a TFORW type t with name specified by sym, such
-// that t.nod and sym.Def are set correctly.
-func newIncompleteNamedType(pos src.XPos, sym *types.Sym) *types.Type {
- name := ir.NewDeclNameAt(pos, ir.OTYPE, sym)
- forw := types.NewNamed(name)
- name.SetType(forw)
- sym.Def = name
- return forw
+// buildClosure2 makes a closure to implement a method expression m (generic form x)
+// which has a shape type as receiver. If the receiver is exactly a shape (i.e. from
+// a typeparam), then the body of the closure converts m.X (the receiver) to the
+// interface bound type, and makes an interface call with the remaining arguments.
+//
+// The returned closure is fully substituted and has already had any needed
+// transformations done.
+func (g *irgen) buildClosure2(subst *subster, m, x ir.Node) ir.Node {
+ outer := subst.newf
+ info := subst.info
+ pos := m.Pos()
+ typ := m.Type() // type of the closure
+
+ fn, formalParams, formalResults := startClosure(pos, outer, typ)
+
+ // Capture dictionary calculated in the outer function
+ dictVar := ir.CaptureName(pos, fn, info.dictParam)
+ typed(types.Types[types.TUINTPTR], dictVar)
+
+ // Build arguments to call inside the closure.
+ var args []ir.Node
+ for i := 0; i < typ.NumParams(); i++ {
+ args = append(args, formalParams[i].Nname.(*ir.Name))
+ }
+
+ // Build call itself. This involves converting the first argument to the
+ // bound type (an interface) using the dictionary, and then making an
+ // interface call with the remaining arguments.
+ var innerCall ir.Node
+ rcvr := args[0]
+ args = args[1:]
+ assert(m.(*ir.SelectorExpr).X.Type().IsShape())
+ gsrc := x.(*ir.SelectorExpr).X.Type()
+ bound := gsrc.Bound()
+ dst := bound
+ if dst.HasTParam() {
+ dst = subst.ts.Typ(bound)
+ }
+ if m.(*ir.SelectorExpr).X.Type().IsInterface() {
+ // If type arg is an interface (unusual case), we do a type assert to
+ // the type bound.
+ rcvr = assertToBound(info, dictVar, pos, rcvr, bound, dst)
+ } else {
+ rcvr = convertUsingDictionary(info, dictVar, pos, rcvr, x, dst, gsrc)
+ }
+ dot := ir.NewSelectorExpr(pos, ir.ODOTINTER, rcvr, x.(*ir.SelectorExpr).Sel)
+ dot.Selection = typecheck.Lookdot1(dot, dot.Sel, dot.X.Type(), dot.X.Type().AllMethods(), 1)
+
+ // Do a type substitution on the generic bound, in case it is parameterized.
+ typed(subst.ts.Typ(x.(*ir.SelectorExpr).Selection.Type), dot)
+ innerCall = ir.NewCallExpr(pos, ir.OCALLINTER, dot, args)
+ t := m.Type()
+ if t.NumResults() == 0 {
+ innerCall.SetTypecheck(1)
+ } else if t.NumResults() == 1 {
+ typed(t.Results().Field(0).Type, innerCall)
+ } else {
+ typed(t.Results(), innerCall)
+ }
+ if len(formalResults) > 0 {
+ innerCall = ir.NewReturnStmt(pos, []ir.Node{innerCall})
+ innerCall.SetTypecheck(1)
+ }
+ fn.Body = []ir.Node{innerCall}
+
+ // We're all done with the captured dictionary
+ ir.FinishCaptureNames(pos, outer, fn)
+
+ // Do final checks on closure and return it.
+ return ir.UseClosure(fn.OClosure, g.target)
}
diff --git a/src/cmd/compile/internal/noder/stmt.go b/src/cmd/compile/internal/noder/stmt.go
index 32a1483b4a..fc1f5836ff 100644
--- a/src/cmd/compile/internal/noder/stmt.go
+++ b/src/cmd/compile/internal/noder/stmt.go
@@ -5,6 +5,7 @@
package noder
import (
+ "cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
"cmd/compile/internal/typecheck"
@@ -27,6 +28,7 @@ func (g *irgen) stmts(stmts []syntax.Stmt) []ir.Node {
}
func (g *irgen) stmt(stmt syntax.Stmt) ir.Node {
+ base.Assert(g.exprStmtOK)
switch stmt := stmt.(type) {
case nil, *syntax.EmptyStmt:
return nil
@@ -35,11 +37,7 @@ func (g *irgen) stmt(stmt syntax.Stmt) ir.Node {
case *syntax.BlockStmt:
return ir.NewBlockStmt(g.pos(stmt), g.blockStmt(stmt))
case *syntax.ExprStmt:
- x := g.expr(stmt.X)
- if call, ok := x.(*ir.CallExpr); ok {
- call.Use = ir.CallUseStmt
- }
- return x
+ return g.expr(stmt.X)
case *syntax.SendStmt:
n := ir.NewSendStmt(g.pos(stmt), g.expr(stmt.Chan), g.expr(stmt.Value))
if n.Chan.Type().HasTParam() || n.Value.Type().HasTParam() {
@@ -52,7 +50,9 @@ func (g *irgen) stmt(stmt syntax.Stmt) ir.Node {
n.SetTypecheck(1)
return n
case *syntax.DeclStmt:
- return ir.NewBlockStmt(g.pos(stmt), g.decls(stmt.DeclList))
+ n := ir.NewBlockStmt(g.pos(stmt), nil)
+ g.decls(&n.List, stmt.DeclList)
+ return n
case *syntax.AssignStmt:
if stmt.Op != 0 && stmt.Op != syntax.Def {
@@ -61,7 +61,10 @@ func (g *irgen) stmt(stmt syntax.Stmt) ir.Node {
if stmt.Rhs == nil {
n = IncDec(g.pos(stmt), op, g.expr(stmt.Lhs))
} else {
- n = ir.NewAssignOpStmt(g.pos(stmt), op, g.expr(stmt.Lhs), g.expr(stmt.Rhs))
+ // Eval rhs before lhs, for compatibility with noder1
+ rhs := g.expr(stmt.Rhs)
+ lhs := g.expr(stmt.Lhs)
+ n = ir.NewAssignOpStmt(g.pos(stmt), op, lhs, rhs)
}
if n.X.Typecheck() == 3 {
n.SetTypecheck(3)
@@ -72,8 +75,9 @@ func (g *irgen) stmt(stmt syntax.Stmt) ir.Node {
return n
}
- names, lhs := g.assignList(stmt.Lhs, stmt.Op == syntax.Def)
+ // Eval rhs before lhs, for compatibility with noder1
rhs := g.exprList(stmt.Rhs)
+ names, lhs := g.assignList(stmt.Lhs, stmt.Op == syntax.Def)
// We must delay transforming the assign statement if any of the
// lhs or rhs nodes are also delayed, since transformAssign needs
@@ -128,6 +132,12 @@ func (g *irgen) stmt(stmt syntax.Stmt) ir.Node {
if e.Type().HasTParam() {
// Delay transforming the return statement if any of the
// return values have a type param.
+ if !ir.HasNamedResults(ir.CurFunc) {
+ transformArgs(n)
+ // But add CONVIFACE nodes where needed if
+ // any of the return values have interface type.
+ typecheckaste(ir.ORETURN, nil, false, ir.CurFunc.Type().Results(), n.Results, true)
+ }
n.SetTypecheck(3)
return n
}
@@ -266,6 +276,12 @@ func (g *irgen) forStmt(stmt *syntax.ForStmt) ir.Node {
key, value := unpackTwo(lhs)
n := ir.NewRangeStmt(g.pos(r), key, value, g.expr(r.X), g.blockStmt(stmt.Body))
n.Def = initDefn(n, names)
+ if key != nil {
+ transformCheckAssign(n, key)
+ }
+ if value != nil {
+ transformCheckAssign(n, value)
+ }
return n
}
diff --git a/src/cmd/compile/internal/noder/sync.go b/src/cmd/compile/internal/noder/sync.go
new file mode 100644
index 0000000000..7af558f8b2
--- /dev/null
+++ b/src/cmd/compile/internal/noder/sync.go
@@ -0,0 +1,187 @@
+// UNREVIEWED
+
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+import (
+ "fmt"
+ "strings"
+)
+
+// enableSync controls whether sync markers are written into unified
+// IR's export data format and also whether they're expected when
+// reading them back in. They're inessential to the correct
+// functioning of unified IR, but are helpful during development to
+// detect mistakes.
+//
+// When sync is enabled, writer stack frames will also be included in
+// the export data. Currently, a fixed number of frames are included,
+// controlled by -d=syncframes (default 0).
+const enableSync = true
+
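
To make the marker idea above concrete, here is a minimal standalone sketch of a writer/reader pair that interleaves sync markers with data and fails loudly on the first mismatch. The marker, buf, and reader types are invented for illustration; they are not the unified IR encoder.

package main

import "fmt"

type marker byte

const (
    mBool marker = iota + 1
    mString
)

type buf struct{ data []byte }

// writeSync records a marker so the reader can verify it is in step.
func (b *buf) writeSync(m marker) { b.data = append(b.data, byte(m)) }

func (b *buf) writeBool(v bool) {
    b.writeSync(mBool)
    if v {
        b.data = append(b.data, 1)
    } else {
        b.data = append(b.data, 0)
    }
}

type reader struct {
    data []byte
    pos  int
}

// readSync consumes a marker and panics if it is not the expected one,
// catching reader/writer desyncs at the point of divergence.
func (r *reader) readSync(want marker) {
    if got := marker(r.data[r.pos]); got != want {
        panic(fmt.Sprintf("desync: have %d, want %d", got, want))
    }
    r.pos++
}

func (r *reader) readBool() bool {
    r.readSync(mBool)
    v := r.data[r.pos] == 1
    r.pos++
    return v
}

func main() {
    var b buf
    b.writeBool(true)
    r := reader{data: b.data}
    fmt.Println(r.readBool()) // prints true
}
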
+// fmtFrames formats a backtrace for reporting reader/writer desyncs.
+func fmtFrames(pcs ...uintptr) []string {
+ res := make([]string, 0, len(pcs))
+ walkFrames(pcs, func(file string, line int, name string, offset uintptr) {
+ // Trim package from function name. It's just redundant noise.
+ name = strings.TrimPrefix(name, "cmd/compile/internal/noder.")
+
+ res = append(res, fmt.Sprintf("%s:%v: %s +0x%v", file, line, name, offset))
+ })
+ return res
+}
+
+type frameVisitor func(file string, line int, name string, offset uintptr)
+
+// syncMarker is an enum type that represents markers that may be
+// written to export data to ensure the reader and writer stay
+// synchronized.
+type syncMarker int
+
+//go:generate stringer -type=syncMarker -trimprefix=sync
+
+// TODO(mdempsky): Cleanup unneeded sync markers.
+
+// TODO(mdempsky): Split these markers into public/stable markers, and
+// private ones. Also, trim unused ones.
+const (
+ _ syncMarker = iota
+ syncNode
+ syncBool
+ syncInt64
+ syncUint64
+ syncString
+ syncPos
+ syncPkg
+ syncSym
+ syncSelector
+ syncKind
+ syncType
+ syncTypePkg
+ syncSignature
+ syncParam
+ syncOp
+ syncObject
+ syncExpr
+ syncStmt
+ syncDecl
+ syncConstDecl
+ syncFuncDecl
+ syncTypeDecl
+ syncVarDecl
+ syncPragma
+ syncValue
+ syncEOF
+ syncMethod
+ syncFuncBody
+ syncUse
+ syncUseObj
+ syncObjectIdx
+ syncTypeIdx
+ syncBOF
+ syncEntry
+ syncOpenScope
+ syncCloseScope
+ syncGlobal
+ syncLocal
+ syncDefine
+ syncDefLocal
+ syncUseLocal
+ syncDefGlobal
+ syncUseGlobal
+ syncTypeParams
+ syncUseLabel
+ syncDefLabel
+ syncFuncLit
+ syncCommonFunc
+ syncBodyRef
+ syncLinksymExt
+ syncHack
+ syncSetlineno
+ syncName
+ syncImportDecl
+ syncDeclNames
+ syncDeclName
+ syncExprList
+ syncExprs
+ syncWrapname
+ syncTypeExpr
+ syncTypeExprOrNil
+ syncChanDir
+ syncParams
+ syncCloseAnotherScope
+ syncSum
+ syncUnOp
+ syncBinOp
+ syncStructType
+ syncInterfaceType
+ syncPackname
+ syncEmbedded
+ syncStmts
+ syncStmtsFall
+ syncStmtFall
+ syncBlockStmt
+ syncIfStmt
+ syncForStmt
+ syncSwitchStmt
+ syncRangeStmt
+ syncCaseClause
+ syncCommClause
+ syncSelectStmt
+ syncDecls
+ syncLabeledStmt
+ syncCompLit
+
+ sync1
+ sync2
+ sync3
+ sync4
+
+ syncN
+ syncDefImplicit
+ syncUseName
+ syncUseObjLocal
+ syncAddLocal
+ syncBothSignature
+ syncSetUnderlying
+ syncLinkname
+ syncStmt1
+ syncStmtsEnd
+ syncDeclare
+ syncTopDecls
+ syncTopConstDecl
+ syncTopFuncDecl
+ syncTopTypeDecl
+ syncTopVarDecl
+ syncObject1
+ syncAddBody
+ syncLabel
+ syncFuncExt
+ syncMethExt
+ syncOptLabel
+ syncScalar
+ syncStmtDecls
+ syncDeclLocal
+ syncObjLocal
+ syncObjLocal1
+ syncDeclareLocal
+ syncPublic
+ syncPrivate
+ syncRelocs
+ syncReloc
+ syncUseReloc
+ syncVarExt
+ syncPkgDef
+ syncTypeExt
+ syncVal
+ syncCodeObj
+ syncPosBase
+ syncLocalIdent
+ syncTypeParamNames
+ syncTypeParamBounds
+ syncImplicitTypes
+ syncObjectName
+)
diff --git a/src/cmd/compile/internal/noder/syncmarker_string.go b/src/cmd/compile/internal/noder/syncmarker_string.go
new file mode 100644
index 0000000000..655cafc950
--- /dev/null
+++ b/src/cmd/compile/internal/noder/syncmarker_string.go
@@ -0,0 +1,156 @@
+// Code generated by "stringer -type=syncMarker -trimprefix=sync"; DO NOT EDIT.
+
+package noder
+
+import "strconv"
+
+func _() {
+ // An "invalid array index" compiler error signifies that the constant values have changed.
+ // Re-run the stringer command to generate them again.
+ var x [1]struct{}
+ _ = x[syncNode-1]
+ _ = x[syncBool-2]
+ _ = x[syncInt64-3]
+ _ = x[syncUint64-4]
+ _ = x[syncString-5]
+ _ = x[syncPos-6]
+ _ = x[syncPkg-7]
+ _ = x[syncSym-8]
+ _ = x[syncSelector-9]
+ _ = x[syncKind-10]
+ _ = x[syncType-11]
+ _ = x[syncTypePkg-12]
+ _ = x[syncSignature-13]
+ _ = x[syncParam-14]
+ _ = x[syncOp-15]
+ _ = x[syncObject-16]
+ _ = x[syncExpr-17]
+ _ = x[syncStmt-18]
+ _ = x[syncDecl-19]
+ _ = x[syncConstDecl-20]
+ _ = x[syncFuncDecl-21]
+ _ = x[syncTypeDecl-22]
+ _ = x[syncVarDecl-23]
+ _ = x[syncPragma-24]
+ _ = x[syncValue-25]
+ _ = x[syncEOF-26]
+ _ = x[syncMethod-27]
+ _ = x[syncFuncBody-28]
+ _ = x[syncUse-29]
+ _ = x[syncUseObj-30]
+ _ = x[syncObjectIdx-31]
+ _ = x[syncTypeIdx-32]
+ _ = x[syncBOF-33]
+ _ = x[syncEntry-34]
+ _ = x[syncOpenScope-35]
+ _ = x[syncCloseScope-36]
+ _ = x[syncGlobal-37]
+ _ = x[syncLocal-38]
+ _ = x[syncDefine-39]
+ _ = x[syncDefLocal-40]
+ _ = x[syncUseLocal-41]
+ _ = x[syncDefGlobal-42]
+ _ = x[syncUseGlobal-43]
+ _ = x[syncTypeParams-44]
+ _ = x[syncUseLabel-45]
+ _ = x[syncDefLabel-46]
+ _ = x[syncFuncLit-47]
+ _ = x[syncCommonFunc-48]
+ _ = x[syncBodyRef-49]
+ _ = x[syncLinksymExt-50]
+ _ = x[syncHack-51]
+ _ = x[syncSetlineno-52]
+ _ = x[syncName-53]
+ _ = x[syncImportDecl-54]
+ _ = x[syncDeclNames-55]
+ _ = x[syncDeclName-56]
+ _ = x[syncExprList-57]
+ _ = x[syncExprs-58]
+ _ = x[syncWrapname-59]
+ _ = x[syncTypeExpr-60]
+ _ = x[syncTypeExprOrNil-61]
+ _ = x[syncChanDir-62]
+ _ = x[syncParams-63]
+ _ = x[syncCloseAnotherScope-64]
+ _ = x[syncSum-65]
+ _ = x[syncUnOp-66]
+ _ = x[syncBinOp-67]
+ _ = x[syncStructType-68]
+ _ = x[syncInterfaceType-69]
+ _ = x[syncPackname-70]
+ _ = x[syncEmbedded-71]
+ _ = x[syncStmts-72]
+ _ = x[syncStmtsFall-73]
+ _ = x[syncStmtFall-74]
+ _ = x[syncBlockStmt-75]
+ _ = x[syncIfStmt-76]
+ _ = x[syncForStmt-77]
+ _ = x[syncSwitchStmt-78]
+ _ = x[syncRangeStmt-79]
+ _ = x[syncCaseClause-80]
+ _ = x[syncCommClause-81]
+ _ = x[syncSelectStmt-82]
+ _ = x[syncDecls-83]
+ _ = x[syncLabeledStmt-84]
+ _ = x[syncCompLit-85]
+ _ = x[sync1-86]
+ _ = x[sync2-87]
+ _ = x[sync3-88]
+ _ = x[sync4-89]
+ _ = x[syncN-90]
+ _ = x[syncDefImplicit-91]
+ _ = x[syncUseName-92]
+ _ = x[syncUseObjLocal-93]
+ _ = x[syncAddLocal-94]
+ _ = x[syncBothSignature-95]
+ _ = x[syncSetUnderlying-96]
+ _ = x[syncLinkname-97]
+ _ = x[syncStmt1-98]
+ _ = x[syncStmtsEnd-99]
+ _ = x[syncDeclare-100]
+ _ = x[syncTopDecls-101]
+ _ = x[syncTopConstDecl-102]
+ _ = x[syncTopFuncDecl-103]
+ _ = x[syncTopTypeDecl-104]
+ _ = x[syncTopVarDecl-105]
+ _ = x[syncObject1-106]
+ _ = x[syncAddBody-107]
+ _ = x[syncLabel-108]
+ _ = x[syncFuncExt-109]
+ _ = x[syncMethExt-110]
+ _ = x[syncOptLabel-111]
+ _ = x[syncScalar-112]
+ _ = x[syncStmtDecls-113]
+ _ = x[syncDeclLocal-114]
+ _ = x[syncObjLocal-115]
+ _ = x[syncObjLocal1-116]
+ _ = x[syncDeclareLocal-117]
+ _ = x[syncPublic-118]
+ _ = x[syncPrivate-119]
+ _ = x[syncRelocs-120]
+ _ = x[syncReloc-121]
+ _ = x[syncUseReloc-122]
+ _ = x[syncVarExt-123]
+ _ = x[syncPkgDef-124]
+ _ = x[syncTypeExt-125]
+ _ = x[syncVal-126]
+ _ = x[syncCodeObj-127]
+ _ = x[syncPosBase-128]
+ _ = x[syncLocalIdent-129]
+ _ = x[syncTypeParamNames-130]
+ _ = x[syncTypeParamBounds-131]
+ _ = x[syncImplicitTypes-132]
+ _ = x[syncObjectName-133]
+}
+
+const _syncMarker_name = "NodeBoolInt64Uint64StringPosPkgSymSelectorKindTypeTypePkgSignatureParamOpObjectExprStmtDeclConstDeclFuncDeclTypeDeclVarDeclPragmaValueEOFMethodFuncBodyUseUseObjObjectIdxTypeIdxBOFEntryOpenScopeCloseScopeGlobalLocalDefineDefLocalUseLocalDefGlobalUseGlobalTypeParamsUseLabelDefLabelFuncLitCommonFuncBodyRefLinksymExtHackSetlinenoNameImportDeclDeclNamesDeclNameExprListExprsWrapnameTypeExprTypeExprOrNilChanDirParamsCloseAnotherScopeSumUnOpBinOpStructTypeInterfaceTypePacknameEmbeddedStmtsStmtsFallStmtFallBlockStmtIfStmtForStmtSwitchStmtRangeStmtCaseClauseCommClauseSelectStmtDeclsLabeledStmtCompLit1234NDefImplicitUseNameUseObjLocalAddLocalBothSignatureSetUnderlyingLinknameStmt1StmtsEndDeclareTopDeclsTopConstDeclTopFuncDeclTopTypeDeclTopVarDeclObject1AddBodyLabelFuncExtMethExtOptLabelScalarStmtDeclsDeclLocalObjLocalObjLocal1DeclareLocalPublicPrivateRelocsRelocUseRelocVarExtPkgDefTypeExtValCodeObjPosBaseLocalIdentTypeParamNamesTypeParamBoundsImplicitTypesObjectName"
+
+var _syncMarker_index = [...]uint16{0, 4, 8, 13, 19, 25, 28, 31, 34, 42, 46, 50, 57, 66, 71, 73, 79, 83, 87, 91, 100, 108, 116, 123, 129, 134, 137, 143, 151, 154, 160, 169, 176, 179, 184, 193, 203, 209, 214, 220, 228, 236, 245, 254, 264, 272, 280, 287, 297, 304, 314, 318, 327, 331, 341, 350, 358, 366, 371, 379, 387, 400, 407, 413, 430, 433, 437, 442, 452, 465, 473, 481, 486, 495, 503, 512, 518, 525, 535, 544, 554, 564, 574, 579, 590, 597, 598, 599, 600, 601, 602, 613, 620, 631, 639, 652, 665, 673, 678, 686, 693, 701, 713, 724, 735, 745, 752, 759, 764, 771, 778, 786, 792, 801, 810, 818, 827, 839, 845, 852, 858, 863, 871, 877, 883, 890, 893, 900, 907, 917, 931, 946, 959, 969}
+
+func (i syncMarker) String() string {
+ i -= 1
+ if i < 0 || i >= syncMarker(len(_syncMarker_index)-1) {
+ return "syncMarker(" + strconv.FormatInt(int64(i+1), 10) + ")"
+ }
+ return _syncMarker_name[_syncMarker_index[i]:_syncMarker_index[i+1]]
+}
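
The generated String method above relies on the usual stringer encoding: all names concatenated into one string plus a table of end offsets. A small standalone sketch of the same slicing trick, with an invented color enum that starts at zero so no i -= 1 adjustment is needed:

package main

import "fmt"

// One long string of all names plus a table of end offsets, the same
// encoding used by the generated syncmarker_string.go above.
const colorNames = "RedGreenBlue"

var colorIndex = [...]uint8{0, 3, 8, 12}

type color int

func (c color) String() string {
    if c < 0 || int(c) >= len(colorIndex)-1 {
        return "color(" + fmt.Sprint(int(c)) + ")"
    }
    return colorNames[colorIndex[c]:colorIndex[c+1]]
}

func main() {
    fmt.Println(color(1)) // prints Green
}
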
diff --git a/src/cmd/compile/internal/noder/transform.go b/src/cmd/compile/internal/noder/transform.go
index 2859089e69..be8651d47b 100644
--- a/src/cmd/compile/internal/noder/transform.go
+++ b/src/cmd/compile/internal/noder/transform.go
@@ -85,7 +85,15 @@ func stringtoruneslit(n *ir.ConvExpr) ir.Node {
// etc. Corresponds to typecheck.tcConv.
func transformConv(n *ir.ConvExpr) ir.Node {
t := n.X.Type()
- op, _ := typecheck.Convertop(n.X.Op() == ir.OLITERAL, t, n.Type())
+ op, why := typecheck.Convertop(n.X.Op() == ir.OLITERAL, t, n.Type())
+ if op == ir.OXXX {
+ // types2 currently ignores pragmas, so a 'notinheap' mismatch is the
+ // one type-related error that it does not catch. This error will be
+ // caught here by Convertop (see two checks near beginning of
+ // Convertop) and reported at the end of noding.
+ base.ErrorfAt(n.Pos(), "cannot convert %L to type %v%s", n.X, n.Type(), why)
+ return n
+ }
n.SetOp(op)
switch n.Op() {
case ir.OCONVNOP:
@@ -122,7 +130,8 @@ func transformConvCall(n *ir.CallExpr) ir.Node {
}
// transformCall transforms a normal function/method call. Corresponds to last half
-// (non-conversion, non-builtin part) of typecheck.tcCall.
+// (non-conversion, non-builtin part) of typecheck.tcCall. This code should work even
+// in the case of OCALL/OFUNCINST.
func transformCall(n *ir.CallExpr) {
// n.Type() can be nil for calls with no return value
assert(n.Typecheck() == 1)
@@ -148,10 +157,11 @@ func transformCall(n *ir.CallExpr) {
n.SetOp(ir.OCALLFUNC)
}
- typecheckaste(ir.OCALL, n.X, n.IsDDD, t.Params(), n.Args)
+ typecheckaste(ir.OCALL, n.X, n.IsDDD, t.Params(), n.Args, false)
+ if l.Op() == ir.ODOTMETH && len(deref(n.X.Type().Recv().Type).RParams()) == 0 {
+ typecheck.FixMethodCall(n)
+ }
if t.NumResults() == 1 {
- n.SetType(l.Type().Results().Field(0).Type)
-
if n.Op() == ir.OCALLFUNC && n.X.Op() == ir.ONAME {
if sym := n.X.(*ir.Name).Sym(); types.IsRuntimePkg(sym.Pkg) && sym.Name == "getg" {
// Emit code for runtime.getg() directly instead of calling function.
@@ -185,7 +195,7 @@ func transformCompare(n *ir.BinaryExpr) {
aop, _ := typecheck.Assignop(lt, rt)
if aop != ir.OXXX {
types.CalcSize(lt)
- if rt.IsInterface() == lt.IsInterface() || lt.Width >= 1<<16 {
+ if lt.HasTParam() || rt.IsInterface() == lt.IsInterface() || lt.Width >= 1<<16 {
l = ir.NewConvExpr(base.Pos, aop, rt, l)
l.SetTypecheck(1)
}
@@ -198,7 +208,7 @@ func transformCompare(n *ir.BinaryExpr) {
aop, _ := typecheck.Assignop(rt, lt)
if aop != ir.OXXX {
types.CalcSize(rt)
- if rt.IsInterface() == lt.IsInterface() || rt.Width >= 1<<16 {
+ if rt.HasTParam() || rt.IsInterface() == lt.IsInterface() || rt.Width >= 1<<16 {
r = ir.NewConvExpr(base.Pos, aop, lt, r)
r.SetTypecheck(1)
}
@@ -303,6 +313,10 @@ assignOK:
r := r.(*ir.TypeAssertExpr)
stmt.SetOp(ir.OAS2DOTTYPE)
r.SetOp(ir.ODOTTYPE2)
+ case ir.ODYNAMICDOTTYPE:
+ r := r.(*ir.DynamicTypeAssertExpr)
+ stmt.SetOp(ir.OAS2DOTTYPE)
+ r.SetOp(ir.ODYNAMICDOTTYPE2)
default:
break assignOK
}
@@ -323,11 +337,22 @@ assignOK:
stmt := stmt.(*ir.AssignListStmt)
stmt.SetOp(ir.OAS2FUNC)
r := rhs[0].(*ir.CallExpr)
- r.Use = ir.CallUseList
rtyp := r.Type()
+ mismatched := false
+ failed := false
for i := range lhs {
- checkLHS(i, rtyp.Field(i).Type)
+ result := rtyp.Field(i).Type
+ checkLHS(i, result)
+
+ if lhs[i].Type() == nil || result == nil {
+ failed = true
+ } else if lhs[i] != ir.BlankNode && !types.Identical(lhs[i].Type(), result) {
+ mismatched = true
+ }
+ }
+ if mismatched && !failed {
+ typecheck.RewriteMultiValueCall(stmt, r)
}
return
}
@@ -340,12 +365,12 @@ assignOK:
}
}
-// Corresponds to typecheck.typecheckargs.
+// Corresponds to typecheck.typecheckargs. Really just deals with multi-value calls.
func transformArgs(n ir.InitNode) {
var list []ir.Node
switch n := n.(type) {
default:
- base.Fatalf("typecheckargs %+v", n.Op())
+ base.Fatalf("transformArgs %+v", n.Op())
case *ir.CallExpr:
list = n.Args
if n.IsDDD {
@@ -363,46 +388,13 @@ func transformArgs(n ir.InitNode) {
return
}
- // Rewrite f(g()) into t1, t2, ... = g(); f(t1, t2, ...).
-
// Save n as n.Orig for fmt.go.
if ir.Orig(n) == n {
n.(ir.OrigNode).SetOrig(ir.SepCopy(n))
}
- as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
- as.Rhs.Append(list...)
-
- // If we're outside of function context, then this call will
- // be executed during the generated init function. However,
- // init.go hasn't yet created it. Instead, associate the
- // temporary variables with InitTodoFunc for now, and init.go
- // will reassociate them later when it's appropriate.
- static := ir.CurFunc == nil
- if static {
- ir.CurFunc = typecheck.InitTodoFunc
- }
- list = nil
- for _, f := range t.FieldSlice() {
- t := typecheck.Temp(f.Type)
- as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, t))
- as.Lhs.Append(t)
- list = append(list, t)
- }
- if static {
- ir.CurFunc = nil
- }
-
- switch n := n.(type) {
- case *ir.CallExpr:
- n.Args = list
- case *ir.ReturnStmt:
- n.Results = list
- }
-
- transformAssign(as, as.Lhs, as.Rhs)
- as.SetTypecheck(1)
- n.PtrInit().Append(as)
+ // Rewrite f(g()) into t1, t2, ... = g(); f(t1, t2, ...).
+ typecheck.RewriteMultiValueCall(n, list[0])
}
// assignconvfn converts node n for assignment to type t. Corresponds to
@@ -416,7 +408,10 @@ func assignconvfn(n ir.Node, t *types.Type) ir.Node {
return n
}
- op, _ := typecheck.Assignop(n.Type(), t)
+ op, why := typecheck.Assignop(n.Type(), t)
+ if op == ir.OXXX {
+ base.Fatalf("found illegal assignment %+v -> %+v; %s", n.Type(), t, why)
+ }
r := ir.NewConvExpr(base.Pos, op, t, n)
r.SetTypecheck(1)
@@ -424,8 +419,11 @@ func assignconvfn(n ir.Node, t *types.Type) ir.Node {
return r
}
-// Corresponds to typecheck.typecheckaste.
-func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl ir.Nodes) {
+// Corresponds to typecheck.typecheckaste, but we add an extra flag convifaceOnly.
+// If convifaceOnly is true, we only do interface conversion. We use this to do
+// early insertion of CONVIFACE nodes during noder2, when the function or args may
+// have typeparams.
+func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl ir.Nodes, convifaceOnly bool) {
var t *types.Type
var i int
@@ -444,7 +442,7 @@ func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl i
if isddd {
n = nl[i]
ir.SetPos(n)
- if n.Type() != nil {
+ if n.Type() != nil && (!convifaceOnly || t.IsInterface()) {
nl[i] = assignconvfn(n, t)
}
return
@@ -454,7 +452,7 @@ func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl i
for ; i < len(nl); i++ {
n = nl[i]
ir.SetPos(n)
- if n.Type() != nil {
+ if n.Type() != nil && (!convifaceOnly || t.IsInterface()) {
nl[i] = assignconvfn(n, t.Elem())
}
}
@@ -463,7 +461,7 @@ func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl i
n = nl[i]
ir.SetPos(n)
- if n.Type() != nil {
+ if n.Type() != nil && (!convifaceOnly || t.IsInterface()) {
nl[i] = assignconvfn(n, t)
}
i++
@@ -485,7 +483,7 @@ func transformReturn(rs *ir.ReturnStmt) {
return
}
- typecheckaste(ir.ORETURN, nil, false, ir.CurFunc.Type().Results(), nl)
+ typecheckaste(ir.ORETURN, nil, false, ir.CurFunc.Type().Results(), nl, false)
}
// transformSelect transforms a select node, creating an assignment list as needed
@@ -537,13 +535,31 @@ func transformAsOp(n *ir.AssignOpStmt) {
}
// transformDot transforms an OXDOT (or ODOT) into an ODOT, ODOTPTR, ODOTMETH,
-// ODOTINTER, or OCALLPART, as appropriate. It adds in extra nodes as needed to
+// ODOTINTER, or OMETHVALUE, as appropriate. It adds in extra nodes as needed to
// access embedded fields. Corresponds to typecheck.tcDot.
func transformDot(n *ir.SelectorExpr, isCall bool) ir.Node {
assert(n.Type() != nil && n.Typecheck() == 1)
if n.Op() == ir.OXDOT {
n = typecheck.AddImplicitDots(n)
n.SetOp(ir.ODOT)
+
+ // Set the Selection field and typecheck flag for any new ODOT nodes
+ // added by AddImplicitDots(), and also transform to ODOTPTR if
+ // needed. Equivalent to 'n.X = typecheck(n.X, ctxExpr|ctxType)' in
+ // tcDot.
+ for n1 := n; n1.X.Op() == ir.ODOT; {
+ n1 = n1.X.(*ir.SelectorExpr)
+ if !n1.Implicit() {
+ break
+ }
+ t1 := n1.X.Type()
+ if t1.IsPtr() && !t1.Elem().IsInterface() {
+ t1 = t1.Elem()
+ n1.SetOp(ir.ODOTPTR)
+ }
+ typecheck.Lookdot(n1, t1, 0)
+ n1.SetTypecheck(1)
+ }
}
t := n.X.Type()
@@ -561,8 +577,9 @@ func transformDot(n *ir.SelectorExpr, isCall bool) ir.Node {
assert(f != nil)
if (n.Op() == ir.ODOTINTER || n.Op() == ir.ODOTMETH) && !isCall {
- n.SetOp(ir.OCALLPART)
- n.SetType(typecheck.MethodValueWrapper(n).Type())
+ n.SetOp(ir.OMETHVALUE)
+ // This converts a method type to a function type. See issue 47775.
+ n.SetType(typecheck.NewMethodType(n.Type(), nil))
}
return n
}
@@ -594,7 +611,11 @@ func transformMethodExpr(n *ir.SelectorExpr) (res ir.Node) {
s := n.Sel
m := typecheck.Lookdot1(n, s, t, ms, 0)
- assert(m != nil)
+ if !t.HasShape() {
+ // It's OK to not find the method if t is instantiated by shape types,
+ // because we will use the methods on the generic type anyway.
+ assert(m != nil)
+ }
n.SetOp(ir.OMETHEXPR)
n.Selection = m
@@ -790,7 +811,10 @@ func transformBuiltin(n *ir.CallExpr) ir.Node {
return transformRealImag(u1.(*ir.UnaryExpr))
case ir.OPANIC:
return transformPanic(u1.(*ir.UnaryExpr))
- case ir.OCLOSE, ir.ONEW, ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
+ case ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
+ // This corresponds to the EvalConst() call near end of typecheck().
+ return typecheck.EvalConst(u1)
+ case ir.OCLOSE, ir.ONEW:
// nothing more to do
return u1
}
@@ -911,9 +935,7 @@ func transformCompLit(n *ir.CompLitExpr) (res ir.Node) {
f := t.Field(i)
n1 = assignconvfn(n1, f.Type)
- sk := ir.NewStructKeyExpr(base.Pos, f.Sym, n1)
- sk.Offset = f.Offset
- ls[i] = sk
+ ls[i] = ir.NewStructKeyExpr(base.Pos, f, n1)
}
assert(len(ls) >= t.NumFields())
} else {
@@ -922,33 +944,26 @@ func transformCompLit(n *ir.CompLitExpr) (res ir.Node) {
for i, l := range ls {
ir.SetPos(l)
- if l.Op() == ir.OKEY {
- kv := l.(*ir.KeyExpr)
- key := kv.Key
-
- // Sym might have resolved to name in other top-level
- // package, because of import dot. Redirect to correct sym
- // before we do the lookup.
- s := key.Sym()
- if id, ok := key.(*ir.Ident); ok && typecheck.DotImportRefs[id] != nil {
- s = typecheck.Lookup(s.Name)
- }
-
- // An OXDOT uses the Sym field to hold
- // the field to the right of the dot,
- // so s will be non-nil, but an OXDOT
- // is never a valid struct literal key.
- assert(!(s == nil || s.Pkg != types.LocalPkg || key.Op() == ir.OXDOT || s.IsBlank()))
+ kv := l.(*ir.KeyExpr)
+ key := kv.Key
- l = ir.NewStructKeyExpr(l.Pos(), s, kv.Value)
- ls[i] = l
+ // Sym might have resolved to name in other top-level
+ // package, because of import dot. Redirect to correct sym
+ // before we do the lookup.
+ s := key.Sym()
+ if id, ok := key.(*ir.Ident); ok && typecheck.DotImportRefs[id] != nil {
+ s = typecheck.Lookup(s.Name)
}
- assert(l.Op() == ir.OSTRUCTKEY)
- l := l.(*ir.StructKeyExpr)
+ // An OXDOT uses the Sym field to hold
+ // the field to the right of the dot,
+ // so s will be non-nil, but an OXDOT
+ // is never a valid struct literal key.
+ assert(!(s == nil || s.Pkg != types.LocalPkg || key.Op() == ir.OXDOT || s.IsBlank()))
- f := typecheck.Lookdot1(nil, l.Field, t, t.Fields(), 0)
- l.Offset = f.Offset
+ f := typecheck.Lookdot1(nil, s, t, t.Fields(), 0)
+ l := ir.NewStructKeyExpr(l.Pos(), f, kv.Value)
+ ls[i] = l
l.Value = assignconvfn(l.Value, f.Type)
}
diff --git a/src/cmd/compile/internal/noder/types.go b/src/cmd/compile/internal/noder/types.go
index 8680559a41..541ed68ef3 100644
--- a/src/cmd/compile/internal/noder/types.go
+++ b/src/cmd/compile/internal/noder/types.go
@@ -39,6 +39,11 @@ func (g *irgen) typ(typ types2.Type) *types.Type {
// recursive types have been fully constructed before we call CheckSize.
if res != nil && !res.IsUntyped() && !res.IsFuncArgStruct() && !res.HasTParam() {
types.CheckSize(res)
+ if res.IsPtr() {
+ // Pointers always have their size set, even though their element
+ // may not have its size set.
+ types.CheckSize(res.Elem())
+ }
}
return res
}
@@ -61,15 +66,19 @@ func (g *irgen) typ1(typ types2.Type) *types.Type {
// instTypeName2 creates a name for an instantiated type, based on the type args
// (given as types2 types).
-func instTypeName2(name string, targs []types2.Type) string {
+func instTypeName2(name string, targs *types2.TypeList) string {
b := bytes.NewBufferString(name)
b.WriteByte('[')
- for i, targ := range targs {
+ n := targs.Len()
+ for i := 0; i < n; i++ {
+ targ := targs.At(i)
if i > 0 {
b.WriteByte(',')
}
+ // Include package names for all types, including typeparams, to
+ // make sure type arguments are uniquely specified.
tname := types2.TypeString(targ,
- func(*types2.Package) string { return "" })
+ func(pkg *types2.Package) string { return pkg.Name() })
if strings.Index(tname, ", ") >= 0 {
// types2.TypeString puts spaces after a comma in a type
// list, but we don't want spaces in our actual type names
@@ -89,60 +98,62 @@ func (g *irgen) typ0(typ types2.Type) *types.Type {
case *types2.Basic:
return g.basic(typ)
case *types2.Named:
- if typ.TParams() != nil {
+ // If tparams is set, but targs is not, typ is a base generic
+ // type. typ appears as part of the source type of an alias,
+ // since that is the only use of a generic type that doesn't
+ // involve instantiation. We just translate the named type in the
+ // normal way below using g.obj().
+ if typ.TParams() != nil && typ.TArgs() != nil {
// typ is an instantiation of a defined (named) generic type.
// This instantiation should also be a defined (named) type.
// types2 gives us the substituted type in t.Underlying()
// The substituted type may or may not still have type
// params. We might, for example, be substituting one type
// param for another type param.
-
- if typ.TArgs() == nil {
- base.Fatalf("In typ0, Targs should be set if TParams is set")
- }
-
- // When converted to types.Type, typ must have a name,
- // based on the names of the type arguments. We need a
- // name to deal with recursive generic types (and it also
- // looks better when printing types).
+ //
+ // When converted to types.Type, typ has a unique name,
+ // based on the names of the type arguments.
instName := instTypeName2(typ.Obj().Name(), typ.TArgs())
s := g.pkg(typ.Obj().Pkg()).Lookup(instName)
if s.Def != nil {
- // We have already encountered this instantiation,
- // so use the type we previously created, since there
+ // We have already encountered this instantiation.
+ // Use the type we previously created, since there
// must be exactly one instance of a defined type.
return s.Def.Type()
}
// Create a forwarding type first and put it in the g.typs
- // map, in order to deal with recursive generic types.
- // Fully set up the extra ntyp information (Def, RParams,
- // which may set HasTParam) before translating the
- // underlying type itself, so we handle recursion
- // correctly, including via method signatures.
- ntyp := newIncompleteNamedType(g.pos(typ.Obj().Pos()), s)
+ // map, in order to deal with recursive generic types
+ // (including via method signatures). Set up the extra
+ // ntyp information (Def, RParams, which may set
+ // HasTParam) before translating the underlying type
+ // itself, so we handle recursion correctly.
+ ntyp := typecheck.NewIncompleteNamedType(g.pos(typ.Obj().Pos()), s)
g.typs[typ] = ntyp
// If ntyp still has type params, then we must be
// referencing something like 'value[T2]', as when
- // specifying the generic receiver of a method,
- // where value was defined as "type value[T any]
- // ...". Save the type args, which will now be the
- // new type of the current type.
+ // specifying the generic receiver of a method, where
+ // value was defined as "type value[T any] ...". Save the
+ // type args, which will now be the new typeparams of the
+ // current type.
//
// If ntyp does not have type params, we are saving the
- // concrete types used to instantiate this type. We'll use
- // these when instantiating the methods of the
+ // non-generic types used to instantiate this type. We'll
+ // use these when instantiating the methods of the
// instantiated type.
- rparams := make([]*types.Type, len(typ.TArgs()))
- for i, targ := range typ.TArgs() {
- rparams[i] = g.typ1(targ)
+ targs := typ.TArgs()
+ rparams := make([]*types.Type, targs.Len())
+ for i := range rparams {
+ rparams[i] = g.typ1(targs.At(i))
}
ntyp.SetRParams(rparams)
//fmt.Printf("Saw new type %v %v\n", instName, ntyp.HasTParam())
ntyp.SetUnderlying(g.typ1(typ.Underlying()))
g.fillinMethods(typ, ntyp)
+ // Save the symbol for the base generic type.
+ ntyp.OrigSym = g.pkg(typ.Obj().Pkg()).Lookup(typ.Obj().Name())
return ntyp
}
obj := g.obj(typ.Obj())
@@ -183,12 +194,9 @@ func (g *irgen) typ0(typ types2.Type) *types.Type {
for i := range embeddeds {
// TODO(mdempsky): Get embedding position.
e := typ.EmbeddedType(i)
- if t := types2.AsInterface(e); t != nil && t.IsComparable() {
- // Ignore predefined type 'comparable', since it
- // doesn't resolve and it doesn't have any
- // relevant methods.
- continue
- }
+
+ // With Go 1.18, an embedded element can be any type, not
+ // just an interface.
embeddeds[j] = types.NewField(src.NoXPos, nil, g.typ1(e))
j++
}
@@ -204,20 +212,39 @@ func (g *irgen) typ0(typ types2.Type) *types.Type {
return types.NewInterface(g.tpkg(typ), append(embeddeds, methods...))
case *types2.TypeParam:
- tp := types.NewTypeParam(g.tpkg(typ))
// Save the name of the type parameter in the sym of the type.
// Include the types2 subscript in the sym name
- sym := g.pkg(typ.Obj().Pkg()).Lookup(types2.TypeString(typ, func(*types2.Package) string { return "" }))
- tp.SetSym(sym)
+ pkg := g.tpkg(typ)
+ sym := pkg.Lookup(types2.TypeString(typ, func(*types2.Package) string { return "" }))
+ if sym.Def != nil {
+ // Make sure we use the same type param type for the same
+ // name, whether it is created during types1-import or
+ // this types2-to-types1 translation.
+ return sym.Def.Type()
+ }
+ tp := types.NewTypeParam(sym, typ.Index())
+ nname := ir.NewDeclNameAt(g.pos(typ.Obj().Pos()), ir.OTYPE, sym)
+ sym.Def = nname
+ nname.SetType(tp)
+ tp.SetNod(nname)
// Set g.typs[typ] in case the bound methods reference typ.
g.typs[typ] = tp
- // TODO(danscales): we don't currently need to use the bounds
- // anywhere, so eventually we can probably remove.
- bound := g.typ1(typ.Bound())
- *tp.Methods() = *bound.Methods()
+ bound := g.typ1(typ.Constraint())
+ tp.SetBound(bound)
return tp
+ case *types2.Union:
+ nt := typ.Len()
+ tlist := make([]*types.Type, nt)
+ tildes := make([]bool, nt)
+ for i := range tlist {
+ t := typ.Term(i)
+ tlist[i] = g.typ1(t.Type())
+ tildes[i] = t.Tilde()
+ }
+ return types.NewUnion(tlist, tildes)
+
case *types2.Tuple:
// Tuples are used for the type of a function call (i.e. the
// return value of the function).
@@ -243,19 +270,24 @@ func (g *irgen) typ0(typ types2.Type) *types.Type {
// and for actually generating the methods for instantiated types.
func (g *irgen) fillinMethods(typ *types2.Named, ntyp *types.Type) {
if typ.NumMethods() != 0 {
- targs := make([]ir.Node, len(typ.TArgs()))
- for i, targ := range typ.TArgs() {
- targs[i] = ir.TypeNode(g.typ1(targ))
+ targs2 := typ.TArgs()
+ targs := make([]*types.Type, targs2.Len())
+ for i := range targs {
+ targs[i] = g.typ1(targs2.At(i))
}
methods := make([]*types.Field, typ.NumMethods())
for i := range methods {
m := typ.Method(i)
- meth := g.obj(m)
- recvType := types2.AsSignature(m.Type()).Recv().Type()
- ptr := types2.AsPointer(recvType)
- if ptr != nil {
- recvType = ptr.Elem()
+ recvType := deref2(types2.AsSignature(m.Type()).Recv().Type())
+ var meth *ir.Name
+ if m.Pkg() != g.self {
+ // Imported methods cannot be loaded by name (what
+ // g.obj() does) - they must be loaded via their
+ // type.
+ meth = g.obj(recvType.(*types2.Named).Obj()).Type().Methods().Index(i).Nname.(*ir.Name)
+ } else {
+ meth = g.obj(m)
}
if recvType != types2.Type(typ) {
// Unfortunately, meth is the type of the method of the
@@ -276,18 +308,21 @@ func (g *irgen) fillinMethods(typ *types2.Named, ntyp *types.Type) {
} else {
meth2 = ir.NewNameAt(meth.Pos(), newsym)
rparams := types2.AsSignature(m.Type()).RParams()
- tparams := make([]*types.Field, len(rparams))
- for i, rparam := range rparams {
- tparams[i] = types.NewField(src.NoXPos, nil, g.typ1(rparam.Type()))
+ tparams := make([]*types.Type, rparams.Len())
+ for i := range tparams {
+ tparams[i] = g.typ1(rparams.At(i))
}
assert(len(tparams) == len(targs))
- subst := &subster{
- g: g,
- tparams: tparams,
- targs: targs,
+ ts := typecheck.Tsubster{
+ Tparams: tparams,
+ Targs: targs,
}
// Do the substitution of the type
- meth2.SetType(subst.typ(meth.Type()))
+ meth2.SetType(ts.Typ(meth.Type()))
+ // Add any new fully instantiated types
+ // seen during the substitution to
+ // g.instTypeList.
+ g.instTypeList = append(g.instTypeList, ts.InstTypeList...)
newsym.Def = meth2
}
meth = meth2
@@ -296,7 +331,7 @@ func (g *irgen) fillinMethods(typ *types2.Named, ntyp *types.Type) {
methods[i].Nname = meth
}
ntyp.Methods().Set(methods)
- if !ntyp.HasTParam() {
+ if !ntyp.HasTParam() && !ntyp.HasShape() {
// Generate all the methods for a new fully-instantiated type.
g.instTypeList = append(g.instTypeList, ntyp)
}
@@ -305,9 +340,9 @@ func (g *irgen) fillinMethods(typ *types2.Named, ntyp *types.Type) {
func (g *irgen) signature(recv *types.Field, sig *types2.Signature) *types.Type {
tparams2 := sig.TParams()
- tparams := make([]*types.Field, len(tparams2))
+ tparams := make([]*types.Field, tparams2.Len())
for i := range tparams {
- tp := tparams2[i]
+ tp := tparams2.At(i).Obj()
tparams[i] = types.NewField(g.pos(tp), g.sym(tp), g.typ1(tp.Type()))
}
@@ -346,7 +381,7 @@ func (g *irgen) selector(obj types2.Object) *types.Sym {
return pkg.Lookup(name)
}
-// tpkg returns the package that a function, interface, or struct type
+// tpkg returns the package that a function, interface, struct, or typeparam type
// expression appeared in.
//
// Caveat: For the degenerate types "func()", "interface{}", and
@@ -356,36 +391,39 @@ func (g *irgen) selector(obj types2.Object) *types.Sym {
// particular types is because go/types does *not* report it for
// them. So in practice this limitation is probably moot.
func (g *irgen) tpkg(typ types2.Type) *types.Pkg {
- anyObj := func() types2.Object {
- switch typ := typ.(type) {
- case *types2.Signature:
- if recv := typ.Recv(); recv != nil {
- return recv
- }
- if params := typ.Params(); params.Len() > 0 {
- return params.At(0)
- }
- if results := typ.Results(); results.Len() > 0 {
- return results.At(0)
- }
- case *types2.Struct:
- if typ.NumFields() > 0 {
- return typ.Field(0)
- }
- case *types2.Interface:
- if typ.NumExplicitMethods() > 0 {
- return typ.ExplicitMethod(0)
- }
- }
- return nil
- }
-
- if obj := anyObj(); obj != nil {
+ if obj := anyObj(typ); obj != nil {
return g.pkg(obj.Pkg())
}
return types.LocalPkg
}
+// anyObj returns some object accessible from typ, if any.
+func anyObj(typ types2.Type) types2.Object {
+ switch typ := typ.(type) {
+ case *types2.Signature:
+ if recv := typ.Recv(); recv != nil {
+ return recv
+ }
+ if params := typ.Params(); params.Len() > 0 {
+ return params.At(0)
+ }
+ if results := typ.Results(); results.Len() > 0 {
+ return results.At(0)
+ }
+ case *types2.Struct:
+ if typ.NumFields() > 0 {
+ return typ.Field(0)
+ }
+ case *types2.Interface:
+ if typ.NumExplicitMethods() > 0 {
+ return typ.ExplicitMethod(0)
+ }
+ case *types2.TypeParam:
+ return typ.Obj()
+ }
+ return nil
+}
+
func (g *irgen) basic(typ *types2.Basic) *types.Type {
switch typ.Name() {
case "byte":
@@ -430,3 +468,11 @@ var dirs = [...]types.ChanDir{
types2.SendOnly: types.Csend,
types2.RecvOnly: types.Crecv,
}
+
+// deref2 does a single deref of types2 type t, if it is a pointer type.
+func deref2(t types2.Type) types2.Type {
+ if ptr := types2.AsPointer(t); ptr != nil {
+ t = ptr.Elem()
+ }
+ return t
+}
diff --git a/src/cmd/compile/internal/noder/unified.go b/src/cmd/compile/internal/noder/unified.go
new file mode 100644
index 0000000000..9f80ca000d
--- /dev/null
+++ b/src/cmd/compile/internal/noder/unified.go
@@ -0,0 +1,340 @@
+// UNREVIEWED
+
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+import (
+ "bytes"
+ "fmt"
+ "internal/goversion"
+ "io"
+ "runtime"
+ "sort"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/inline"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/typecheck"
+ "cmd/compile/internal/types"
+ "cmd/compile/internal/types2"
+ "cmd/internal/src"
+)
+
+// localPkgReader holds the package reader used for reading the local
+// package. It exists so the unified IR linker can refer back to it
+// later.
+var localPkgReader *pkgReader
+
+// unified constructs the local package's IR from the syntax package's AST.
+//
+// The pipeline contains 2 steps:
+//
+// (1) Generate package export data "stub".
+//
+// (2) Generate package IR from package export data.
+//
+// The package data "stub" at step (1) contains everything from the local package,
+// but nothing that has been imported. When we're actually writing out export data
+// to the output files (see writeNewExport function), we run the "linker", which does
+// a few things:
+//
+// + Updates compiler extensions data (e.g., inlining cost, escape analysis results).
+//
+// + Handles re-exporting any transitive dependencies.
+//
+// + Prunes out any unnecessary details (e.g., non-inlineable functions, because any
+// downstream importers only care about inlinable functions).
+//
+// The source files are typechecked twice: once before writing export data
+// using the types2 checker, and once after reading export data back in using
+// gc/typecheck. This duplication of work will go away once we always use the
+// types2 checker and can remove the gc/typecheck pass. The reasons it is still
+// here:
+//
+// + It reduces engineering costs in maintaining a fork of typecheck
+// (e.g., no need to backport fixes like CL 327651).
+//
+// + It makes it easier to pass toolstash -cmp.
+//
+// + Historically, we would always re-run the typechecker after import, even though
+// we know the imported data is valid. It's not ideal, but it isn't causing any
+// problems either.
+//
+// + There are still transformations being done during gc/typecheck, like rewriting
+// multi-valued function calls, or transforming ir.OINDEX -> ir.OINDEXMAP.
+//
+// Using the syntax+types2 tree, which already has a complete representation of generics,
+// the unified IR has the full typed AST for doing introspection during step (1).
+// In other words, we have all necessary information to build the generic IR form
+// (see writer.captureVars for an example).
+func unified(noders []*noder) {
+ inline.NewInline = InlineCall
+
+ if !quirksMode() {
+ writeNewExportFunc = writeNewExport
+ } else if base.Flag.G != 0 {
+ base.Errorf("cannot use -G and -d=quirksmode together")
+ }
+
+ newReadImportFunc = func(data string, pkg1 *types.Pkg, check *types2.Checker, packages map[string]*types2.Package) (pkg2 *types2.Package, err error) {
+ pr := newPkgDecoder(pkg1.Path, data)
+
+ // Read package descriptors for both types2 and compiler backend.
+ readPackage(newPkgReader(pr), pkg1)
+ pkg2 = readPackage2(check, packages, pr)
+ return
+ }
+
+ data := writePkgStub(noders)
+
+ // We already passed base.Flag.Lang to types2 to handle validating
+ // the user's source code. Bump it up now to the current version and
+ // re-parse, so typecheck doesn't complain if we construct IR that
+ // utilizes newer Go features.
+ base.Flag.Lang = fmt.Sprintf("go1.%d", goversion.Version)
+ types.ParseLangFlag()
+
+ assert(types.LocalPkg.Path == "")
+ types.LocalPkg.Height = 0 // reset so pkgReader.pkgIdx doesn't complain
+ target := typecheck.Target
+
+ typecheck.TypecheckAllowed = true
+
+ localPkgReader = newPkgReader(newPkgDecoder(types.LocalPkg.Path, data))
+ readPackage(localPkgReader, types.LocalPkg)
+
+ r := localPkgReader.newReader(relocMeta, privateRootIdx, syncPrivate)
+ r.ext = r
+ r.pkgInit(types.LocalPkg, target)
+
+ // Type-check any top-level assignments. We ignore non-assignments
+ // here because other declarations are typechecked as they're
+ // constructed.
+ for i, ndecls := 0, len(target.Decls); i < ndecls; i++ {
+ switch n := target.Decls[i]; n.Op() {
+ case ir.OAS, ir.OAS2:
+ target.Decls[i] = typecheck.Stmt(n)
+ }
+ }
+
+ // Don't use range--bodyIdx can add closures to todoBodies.
+ for len(todoBodies) > 0 {
+ // The order we expand bodies doesn't matter, so pop from the end
+ // to reduce todoBodies reallocations if it grows further.
+ fn := todoBodies[len(todoBodies)-1]
+ todoBodies = todoBodies[:len(todoBodies)-1]
+
+ pri, ok := bodyReader[fn]
+ assert(ok)
+ pri.funcBody(fn)
+
+ // Instantiated generic function: add to Decls for typechecking
+ // and compilation.
+ if fn.OClosure == nil && len(pri.dict.targs) != 0 {
+ target.Decls = append(target.Decls, fn)
+ }
+ }
+ todoBodies = nil
+
+ if !quirksMode() {
+ // TODO(mdempsky): Investigate generating wrappers in quirks mode too.
+ r.wrapTypes(target)
+ }
+
+ // Check that nothing snuck past typechecking.
+ for _, n := range target.Decls {
+ if n.Typecheck() == 0 {
+ base.FatalfAt(n.Pos(), "missed typecheck: %v", n)
+ }
+
+ // For functions, check that at least their first statement (if
+ // any) was typechecked too.
+ if fn, ok := n.(*ir.Func); ok && len(fn.Body) != 0 {
+ if stmt := fn.Body[0]; stmt.Typecheck() == 0 {
+ base.FatalfAt(stmt.Pos(), "missed typecheck: %v", stmt)
+ }
+ }
+ }
+
+ base.ExitIfErrors() // just in case
+}
+
+// writePkgStub type checks the given parsed source files,
+// writes an export data package stub representing them,
+// and returns the result.
+func writePkgStub(noders []*noder) string {
+ m, pkg, info := checkFiles(noders)
+
+ pw := newPkgWriter(m, pkg, info)
+
+ pw.collectDecls(noders)
+
+ publicRootWriter := pw.newWriter(relocMeta, syncPublic)
+ privateRootWriter := pw.newWriter(relocMeta, syncPrivate)
+
+ assert(publicRootWriter.idx == publicRootIdx)
+ assert(privateRootWriter.idx == privateRootIdx)
+
+ {
+ w := publicRootWriter
+ w.pkg(pkg)
+ w.bool(false) // has init; XXX
+
+ scope := pkg.Scope()
+ names := scope.Names()
+ w.len(len(names))
+ for _, name := range scope.Names() {
+ w.obj(scope.Lookup(name), nil)
+ }
+
+ w.sync(syncEOF)
+ w.flush()
+ }
+
+ {
+ w := privateRootWriter
+ w.ext = w
+ w.pkgInit(noders)
+ w.flush()
+ }
+
+ var sb bytes.Buffer // TODO(mdempsky): strings.Builder after #44505 is resolved
+ pw.dump(&sb)
+
+ // At this point, we're done with types2. Make sure the package is
+ // garbage collected.
+ freePackage(pkg)
+
+ return sb.String()
+}
+
+// freePackage ensures the given package is garbage collected.
+func freePackage(pkg *types2.Package) {
+ // The GC test below relies on a precise GC that runs finalizers as
+ // soon as objects are unreachable. Our implementation provides
+ // this, but other/older implementations may not (e.g., Go 1.4 does
+ // not because of #22350). To avoid imposing unnecessary
+ // restrictions on the GOROOT_BOOTSTRAP toolchain, we skip the test
+ // during bootstrapping.
+ if base.CompilerBootstrap {
+ return
+ }
+
+ // Set a finalizer on pkg so we can detect if/when it's collected.
+ done := make(chan struct{})
+ runtime.SetFinalizer(pkg, func(*types2.Package) { close(done) })
+
+ // Important: objects involved in cycles are not finalized, so zero
+ // out pkg to break its cycles and allow the finalizer to run.
+ *pkg = types2.Package{}
+
+ // It typically takes just 1 or 2 cycles to release pkg, but it
+ // doesn't hurt to try a few more times.
+ for i := 0; i < 10; i++ {
+ select {
+ case <-done:
+ return
+ default:
+ runtime.GC()
+ }
+ }
+
+ base.Fatalf("package never finalized")
+}
+
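
The finalizer-plus-GC-loop technique in freePackage can be tried in isolation. Below is a minimal standalone sketch, using an invented big type in place of types2.Package; note that finalizers run asynchronously after a GC cycle, which is why the loop retries a few times.

package main

import (
    "fmt"
    "runtime"
)

// big stands in for the large object whose collection we want to observe.
type big struct {
    buf [1 << 20]byte
}

func main() {
    p := &big{}
    done := make(chan struct{})
    // Set a finalizer so we can detect when p is collected.
    runtime.SetFinalizer(p, func(*big) { close(done) })

    p = nil // drop the only reference so the object becomes unreachable

    // Finalizers run asynchronously, so retry a few GC cycles.
    for i := 0; i < 10; i++ {
        select {
        case <-done:
            fmt.Println("object was collected")
            return
        default:
            runtime.GC()
        }
    }
    fmt.Println("object was never collected")
}
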
+func readPackage(pr *pkgReader, importpkg *types.Pkg) {
+ r := pr.newReader(relocMeta, publicRootIdx, syncPublic)
+
+ pkg := r.pkg()
+ assert(pkg == importpkg)
+
+ if r.bool() {
+ sym := pkg.Lookup(".inittask")
+ task := ir.NewNameAt(src.NoXPos, sym)
+ task.Class = ir.PEXTERN
+ sym.Def = task
+ }
+
+ for i, n := 0, r.len(); i < n; i++ {
+ r.sync(syncObject)
+ assert(!r.bool())
+ idx := r.reloc(relocObj)
+ assert(r.len() == 0)
+
+ path, name, code := r.p.peekObj(idx)
+ if code != objStub {
+ objReader[types.NewPkg(path, "").Lookup(name)] = pkgReaderIndex{pr, idx, nil}
+ }
+ }
+}
+
+func writeNewExport(out io.Writer) {
+ l := linker{
+ pw: newPkgEncoder(),
+
+ pkgs: make(map[string]int),
+ decls: make(map[*types.Sym]int),
+ }
+
+ publicRootWriter := l.pw.newEncoder(relocMeta, syncPublic)
+ assert(publicRootWriter.idx == publicRootIdx)
+
+ var selfPkgIdx int
+
+ {
+ pr := localPkgReader
+ r := pr.newDecoder(relocMeta, publicRootIdx, syncPublic)
+
+ r.sync(syncPkg)
+ selfPkgIdx = l.relocIdx(pr, relocPkg, r.reloc(relocPkg))
+
+ r.bool() // has init
+
+ for i, n := 0, r.len(); i < n; i++ {
+ r.sync(syncObject)
+ assert(!r.bool())
+ idx := r.reloc(relocObj)
+ assert(r.len() == 0)
+
+ xpath, xname, xtag := pr.peekObj(idx)
+ assert(xpath == pr.pkgPath)
+ assert(xtag != objStub)
+
+ if types.IsExported(xname) {
+ l.relocIdx(pr, relocObj, idx)
+ }
+ }
+
+ r.sync(syncEOF)
+ }
+
+ {
+ var idxs []int
+ for _, idx := range l.decls {
+ idxs = append(idxs, idx)
+ }
+ sort.Ints(idxs)
+
+ w := publicRootWriter
+
+ w.sync(syncPkg)
+ w.reloc(relocPkg, selfPkgIdx)
+
+ w.bool(typecheck.Lookup(".inittask").Def != nil)
+
+ w.len(len(idxs))
+ for _, idx := range idxs {
+ w.sync(syncObject)
+ w.bool(false)
+ w.reloc(relocObj, idx)
+ w.len(0)
+ }
+
+ w.sync(syncEOF)
+ w.flush()
+ }
+
+ l.pw.dump(out)
+}
diff --git a/src/cmd/compile/internal/noder/unified_test.go b/src/cmd/compile/internal/noder/unified_test.go
new file mode 100644
index 0000000000..96cc66f775
--- /dev/null
+++ b/src/cmd/compile/internal/noder/unified_test.go
@@ -0,0 +1,153 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder_test
+
+import (
+ "encoding/json"
+ "flag"
+ exec "internal/execabs"
+ "os"
+ "reflect"
+ "runtime"
+ "strings"
+ "testing"
+)
+
+var (
+ flagPkgs = flag.String("pkgs", "std", "list of packages to compare (ignored in -short mode)")
+ flagAll = flag.Bool("all", false, "enable testing of all GOOS/GOARCH targets")
+ flagParallel = flag.Bool("parallel", false, "test GOOS/GOARCH targets in parallel")
+)
+
+// TestUnifiedCompare implements a test similar to running:
+//
+// $ go build -toolexec="toolstash -cmp" std
+//
+// The -pkgs flag controls the list of packages tested.
+//
+// By default, only the native GOOS/GOARCH target is enabled. The -all
+// flag enables testing of non-native targets. The -parallel flag
+// additionally enables testing of targets in parallel.
+//
+// Caution: Testing all targets is very resource intensive! On an IBM
+// P920 (dual Intel Xeon Gold 6154 CPUs; 36 cores, 192GB RAM), testing
+// all targets in parallel takes about 5 minutes. Using the 'go test'
+// command's -run flag for subtest matching is recommended for less
+// powerful machines.
+func TestUnifiedCompare(t *testing.T) {
+ targets, err := exec.Command("go", "tool", "dist", "list").Output()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ for _, target := range strings.Fields(string(targets)) {
+ t.Run(target, func(t *testing.T) {
+ parts := strings.Split(target, "/")
+ goos, goarch := parts[0], parts[1]
+
+ if !(*flagAll || goos == runtime.GOOS && goarch == runtime.GOARCH) {
+ t.Skip("skipping non-native target (use -all to enable)")
+ }
+ if *flagParallel {
+ t.Parallel()
+ }
+
+ pkgs1 := loadPackages(t, goos, goarch, "-d=unified=0 -d=inlfuncswithclosures=0 -d=unifiedquirks=1 -G=0")
+ pkgs2 := loadPackages(t, goos, goarch, "-d=unified=1 -d=inlfuncswithclosures=0 -d=unifiedquirks=1 -G=0")
+
+ if len(pkgs1) != len(pkgs2) {
+ t.Fatalf("length mismatch: %v != %v", len(pkgs1), len(pkgs2))
+ }
+
+ for i := range pkgs1 {
+ pkg1 := pkgs1[i]
+ pkg2 := pkgs2[i]
+
+ path := pkg1.ImportPath
+ if path != pkg2.ImportPath {
+ t.Fatalf("mismatched paths: %q != %q", path, pkg2.ImportPath)
+ }
+
+ // Skip packages that don't have any source files (e.g., packages
+ // unsafe, embed/internal/embedtest, and cmd/internal/moddeps).
+ if pkg1.Export == "" && pkg2.Export == "" {
+ continue
+ }
+
+ if pkg1.BuildID == pkg2.BuildID {
+ t.Errorf("package %q: build IDs unexpectedly matched", path)
+ }
+
+ // Unlike toolstash -cmp, we're comparing the same compiler
+ // binary against itself, just with different flags. So we
+ // don't need to worry about skipping over mismatched version
+ // strings, but we do need to account for differing build IDs.
+ //
+ // Fortunately, build IDs are cryptographic 256-bit hashes,
+ // and cmd/go provides us with them up front. So we can just
+ // use them as delimiters to split the files, and then check
+ // that the substrings are all equal.
+ file1 := strings.Split(readFile(t, pkg1.Export), pkg1.BuildID)
+ file2 := strings.Split(readFile(t, pkg2.Export), pkg2.BuildID)
+ if !reflect.DeepEqual(file1, file2) {
+ t.Errorf("package %q: compile output differs", path)
+ }
+ }
+ })
+ }
+}
+
+type pkg struct {
+ ImportPath string
+ Export string
+ BuildID string
+ Incomplete bool
+}
+
+func loadPackages(t *testing.T, goos, goarch, gcflags string) []pkg {
+ args := []string{"list", "-e", "-export", "-json", "-gcflags=all=" + gcflags, "--"}
+ if testing.Short() {
+ t.Log("short testing mode; only testing package runtime")
+ args = append(args, "runtime")
+ } else {
+ args = append(args, strings.Fields(*flagPkgs)...)
+ }
+
+ cmd := exec.Command("go", args...)
+ cmd.Env = append(os.Environ(), "GOOS="+goos, "GOARCH="+goarch)
+ cmd.Stderr = os.Stderr
+ t.Logf("running %v", cmd)
+ stdout, err := cmd.StdoutPipe()
+ if err != nil {
+ t.Fatal(err)
+ }
+ if err := cmd.Start(); err != nil {
+ t.Fatal(err)
+ }
+
+ var res []pkg
+ for dec := json.NewDecoder(stdout); dec.More(); {
+ var pkg pkg
+ if err := dec.Decode(&pkg); err != nil {
+ t.Fatal(err)
+ }
+ if pkg.Incomplete {
+ t.Fatalf("incomplete package: %q", pkg.ImportPath)
+ }
+ res = append(res, pkg)
+ }
+ if err := cmd.Wait(); err != nil {
+ t.Fatal(err)
+ }
+ return res
+}
+
+func readFile(t *testing.T, name string) string {
+ buf, err := os.ReadFile(name)
+ if err != nil {
+ t.Fatal(err)
+ }
+ return string(buf)
+}
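For illustration, here is a minimal standalone sketch of the comparison TestUnifiedCompare performs above: each export file is split on its own build ID, and the remaining pieces must match exactly. The package and helper names are invented for the example and are not part of this change.

package main

import (
	"fmt"
	"reflect"
	"strings"
)

// equalIgnoringBuildID reports whether two export files are identical once
// each file's own build ID is treated as a wildcard, the same check the
// test makes with strings.Split and reflect.DeepEqual.
func equalIgnoringBuildID(file1, id1, file2, id2 string) bool {
	return reflect.DeepEqual(strings.Split(file1, id1), strings.Split(file2, id2))
}

func main() {
	a := "header aaaa body aaaa trailer"
	b := "header bbbb body bbbb trailer"
	fmt.Println(equalIgnoringBuildID(a, "aaaa", b, "bbbb")) // true
	fmt.Println(equalIgnoringBuildID(a, "aaaa", b, "cccc")) // false
}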
diff --git a/src/cmd/compile/internal/noder/validate.go b/src/cmd/compile/internal/noder/validate.go
index b926222c89..dcacae7480 100644
--- a/src/cmd/compile/internal/noder/validate.go
+++ b/src/cmd/compile/internal/noder/validate.go
@@ -55,7 +55,15 @@ func (g *irgen) validate(n syntax.Node) {
case *syntax.CallExpr:
tv := g.info.Types[n.Fun]
if tv.IsBuiltin() {
- switch builtin := n.Fun.(type) {
+ fun := n.Fun
+ for {
+ builtin, ok := fun.(*syntax.ParenExpr)
+ if !ok {
+ break
+ }
+ fun = builtin.X
+ }
+ switch builtin := fun.(type) {
case *syntax.Name:
g.validateBuiltin(builtin.Value, n)
case *syntax.SelectorExpr:
@@ -73,7 +81,16 @@ func (g *irgen) validateBuiltin(name string, call *syntax.CallExpr) {
// Check that types2+gcSizes calculates sizes the same
// as cmd/compile does.
- got, ok := constant.Int64Val(g.info.Types[call].Value)
+ tv := g.info.Types[call]
+ if !tv.IsValue() {
+ base.FatalfAt(g.pos(call), "expected a value")
+ }
+
+ if tv.Value == nil {
+ break // unsafe op is not a constant, so no further validation
+ }
+
+ got, ok := constant.Int64Val(tv.Value)
if !ok {
base.FatalfAt(g.pos(call), "expected int64 constant value")
}
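The first hunk above makes validate unwrap any number of parentheses around a builtin before switching on its form, so calls such as (len)(x) are still recognized. A rough self-contained sketch of that unwrapping loop, using a stand-in type rather than cmd/compile/internal/syntax:

package main

import "fmt"

// paren stands in for syntax.ParenExpr; any other value is treated as a
// non-parenthesized expression.
type paren struct{ x interface{} }

// unwrap strips nested parentheses, the same shape as the loop added to
// (*irgen).validate before the type switch on the builtin expression.
func unwrap(e interface{}) interface{} {
	for {
		p, ok := e.(paren)
		if !ok {
			return e
		}
		e = p.x
	}
}

func main() {
	fmt.Println(unwrap(paren{paren{"len"}})) // len
}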
diff --git a/src/cmd/compile/internal/noder/writer.go b/src/cmd/compile/internal/noder/writer.go
new file mode 100644
index 0000000000..1405c77161
--- /dev/null
+++ b/src/cmd/compile/internal/noder/writer.go
@@ -0,0 +1,1882 @@
+// UNREVIEWED
+
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package noder
+
+import (
+ "fmt"
+ "go/constant"
+
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/syntax"
+ "cmd/compile/internal/types2"
+)
+
+type pkgWriter struct {
+ pkgEncoder
+
+ m posMap
+ curpkg *types2.Package
+ info *types2.Info
+
+ posBasesIdx map[*syntax.PosBase]int
+ pkgsIdx map[*types2.Package]int
+ typsIdx map[types2.Type]int
+ globalsIdx map[types2.Object]int
+
+ funDecls map[*types2.Func]*syntax.FuncDecl
+ typDecls map[*types2.TypeName]typeDeclGen
+
+ linknames map[types2.Object]string
+ cgoPragmas [][]string
+
+ dups dupTypes
+}
+
+func newPkgWriter(m posMap, pkg *types2.Package, info *types2.Info) *pkgWriter {
+ return &pkgWriter{
+ pkgEncoder: newPkgEncoder(),
+
+ m: m,
+ curpkg: pkg,
+ info: info,
+
+ pkgsIdx: make(map[*types2.Package]int),
+ globalsIdx: make(map[types2.Object]int),
+ typsIdx: make(map[types2.Type]int),
+
+ posBasesIdx: make(map[*syntax.PosBase]int),
+
+ funDecls: make(map[*types2.Func]*syntax.FuncDecl),
+ typDecls: make(map[*types2.TypeName]typeDeclGen),
+
+ linknames: make(map[types2.Object]string),
+ }
+}
+
+func (pw *pkgWriter) errorf(p poser, msg string, args ...interface{}) {
+ base.ErrorfAt(pw.m.pos(p), msg, args...)
+}
+
+func (pw *pkgWriter) fatalf(p poser, msg string, args ...interface{}) {
+ base.FatalfAt(pw.m.pos(p), msg, args...)
+}
+
+func (pw *pkgWriter) unexpected(what string, p poser) {
+ pw.fatalf(p, "unexpected %s: %v (%T)", what, p, p)
+}
+
+type writer struct {
+ p *pkgWriter
+
+ encoder
+
+ // For writing out object descriptions, ext points to the extension
+ // writer for where we can write the compiler's private extension
+ // details for the object.
+ //
+ // TODO(mdempsky): This is a little hacky, but works easiest with
+ // the way things are currently.
+ ext *writer
+
+ // TODO(mdempsky): We should be able to prune localsIdx whenever a
+ // scope closes, and then maybe we can just use the same map for
+ // storing the TypeParams too (as their TypeName instead).
+
+ // variables declared within this function
+ localsIdx map[*types2.Var]int
+
+ closureVars []posObj
+ closureVarsIdx map[*types2.Var]int
+
+ dict *writerDict
+ derived bool
+}
+
+// A writerDict tracks types and objects that are used by a declaration.
+type writerDict struct {
+ implicits []*types2.TypeName
+
+ // derived is a slice of type indices for computing derived types
+ // (i.e., types that depend on the declaration's type parameters).
+ derived []derivedInfo
+
+ // derivedIdx maps a Type to its corresponding index within the
+ // derived slice, if present.
+ derivedIdx map[types2.Type]int
+
+ // funcs lists references to generic functions that were
+ // instantiated with derived types (i.e., that require
+ // sub-dictionaries when called at run time).
+ funcs []objInfo
+}
+
+type derivedInfo struct {
+ idx int
+ needed bool
+}
+
+type typeInfo struct {
+ idx int
+ derived bool
+}
+
+type objInfo struct {
+ idx int // index for the generic function declaration
+ explicits []typeInfo // info for the type arguments
+}
+
+func (info objInfo) anyDerived() bool {
+ for _, explicit := range info.explicits {
+ if explicit.derived {
+ return true
+ }
+ }
+ return false
+}
+
+func (info objInfo) equals(other objInfo) bool {
+ if info.idx != other.idx {
+ return false
+ }
+ assert(len(info.explicits) == len(other.explicits))
+ for i, targ := range info.explicits {
+ if targ != other.explicits[i] {
+ return false
+ }
+ }
+ return true
+}
+
+func (pw *pkgWriter) newWriter(k reloc, marker syncMarker) *writer {
+ return &writer{
+ encoder: pw.newEncoder(k, marker),
+ p: pw,
+ }
+}
+
+// @@@ Positions
+
+func (w *writer) pos(p poser) {
+ w.sync(syncPos)
+ pos := p.Pos()
+
+ // TODO(mdempsky): Track down the remaining cases here and fix them.
+ if !w.bool(pos.IsKnown()) {
+ return
+ }
+
+ // TODO(mdempsky): Delta encoding. Also, if there's a b-side, update
+ // its position base too (but not vice versa!).
+ w.posBase(pos.Base())
+ w.uint(pos.Line())
+ w.uint(pos.Col())
+}
+
+func (w *writer) posBase(b *syntax.PosBase) {
+ w.reloc(relocPosBase, w.p.posBaseIdx(b))
+}
+
+func (pw *pkgWriter) posBaseIdx(b *syntax.PosBase) int {
+ if idx, ok := pw.posBasesIdx[b]; ok {
+ return idx
+ }
+
+ w := pw.newWriter(relocPosBase, syncPosBase)
+ w.p.posBasesIdx[b] = w.idx
+
+ w.string(trimFilename(b))
+
+ if !w.bool(b.IsFileBase()) {
+ w.pos(b)
+ w.uint(b.Line())
+ w.uint(b.Col())
+ }
+
+ return w.flush()
+}
+
+// @@@ Packages
+
+func (w *writer) pkg(pkg *types2.Package) {
+ w.sync(syncPkg)
+ w.reloc(relocPkg, w.p.pkgIdx(pkg))
+}
+
+func (pw *pkgWriter) pkgIdx(pkg *types2.Package) int {
+ if idx, ok := pw.pkgsIdx[pkg]; ok {
+ return idx
+ }
+
+ w := pw.newWriter(relocPkg, syncPkgDef)
+ pw.pkgsIdx[pkg] = w.idx
+
+ if pkg == nil {
+ w.string("builtin")
+ } else {
+ var path string
+ if pkg != w.p.curpkg {
+ path = pkg.Path()
+ }
+ w.string(path)
+ w.string(pkg.Name())
+ w.len(pkg.Height())
+
+ w.len(len(pkg.Imports()))
+ for _, imp := range pkg.Imports() {
+ w.pkg(imp)
+ }
+ }
+
+ return w.flush()
+}
+
+// @@@ Types
+
+func (w *writer) typ(typ types2.Type) {
+ w.typInfo(w.p.typIdx(typ, w.dict))
+}
+
+func (w *writer) typInfo(info typeInfo) {
+ w.sync(syncType)
+ if w.bool(info.derived) {
+ w.len(info.idx)
+ w.derived = true
+ } else {
+ w.reloc(relocType, info.idx)
+ }
+}
+
+// typIdx returns the index where the export data description of type
+// can be read back in. If no such index exists yet, it's created.
+//
+// typIdx also reports whether typ is a derived type; that is, whether
+// its identity depends on type parameters.
+func (pw *pkgWriter) typIdx(typ types2.Type, dict *writerDict) typeInfo {
+ if quirksMode() {
+ typ = pw.dups.orig(typ)
+ }
+
+ if idx, ok := pw.typsIdx[typ]; ok {
+ return typeInfo{idx: idx, derived: false}
+ }
+ if dict != nil {
+ if idx, ok := dict.derivedIdx[typ]; ok {
+ return typeInfo{idx: idx, derived: true}
+ }
+ }
+
+ w := pw.newWriter(relocType, syncTypeIdx)
+ w.dict = dict
+
+ switch typ := typ.(type) {
+ default:
+ base.Fatalf("unexpected type: %v (%T)", typ, typ)
+
+ case *types2.Basic:
+ switch kind := typ.Kind(); {
+ case kind == types2.Invalid:
+ base.Fatalf("unexpected types2.Invalid")
+
+ case types2.Typ[kind] == typ:
+ w.code(typeBasic)
+ w.len(int(kind))
+
+ default:
+ // Handle "byte" and "rune" as references to their TypeName.
+ obj := types2.Universe.Lookup(typ.Name())
+ assert(obj.Type() == typ)
+
+ w.code(typeNamed)
+ w.obj(obj, nil)
+ }
+
+ case *types2.Named:
+ // Type aliases can refer to uninstantiated generic types, so we
+ // might see len(TParams) != 0 && len(TArgs) == 0 here.
+ // TODO(mdempsky): Revisit after #46477 is resolved.
+ assert(typ.TParams().Len() == typ.TArgs().Len() || typ.TArgs().Len() == 0)
+
+ // TODO(mdempsky): Why do we need to loop here?
+ orig := typ
+ for orig.TArgs() != nil {
+ orig = orig.Orig()
+ }
+
+ w.code(typeNamed)
+ w.obj(orig.Obj(), typ.TArgs())
+
+ case *types2.TypeParam:
+ index := func() int {
+ for idx, name := range w.dict.implicits {
+ if name.Type().(*types2.TypeParam) == typ {
+ return idx
+ }
+ }
+
+ return len(w.dict.implicits) + typ.Index()
+ }()
+
+ w.derived = true
+ w.code(typeTypeParam)
+ w.len(index)
+
+ case *types2.Array:
+ w.code(typeArray)
+ w.uint64(uint64(typ.Len()))
+ w.typ(typ.Elem())
+
+ case *types2.Chan:
+ w.code(typeChan)
+ w.len(int(typ.Dir()))
+ w.typ(typ.Elem())
+
+ case *types2.Map:
+ w.code(typeMap)
+ w.typ(typ.Key())
+ w.typ(typ.Elem())
+
+ case *types2.Pointer:
+ w.code(typePointer)
+ w.typ(typ.Elem())
+
+ case *types2.Signature:
+ assert(typ.TParams() == nil)
+ w.code(typeSignature)
+ w.signature(typ)
+
+ case *types2.Slice:
+ w.code(typeSlice)
+ w.typ(typ.Elem())
+
+ case *types2.Struct:
+ w.code(typeStruct)
+ w.structType(typ)
+
+ case *types2.Interface:
+ w.code(typeInterface)
+ w.interfaceType(typ)
+
+ case *types2.Union:
+ w.code(typeUnion)
+ w.unionType(typ)
+ }
+
+ if w.derived {
+ idx := len(dict.derived)
+ dict.derived = append(dict.derived, derivedInfo{idx: w.flush()})
+ dict.derivedIdx[typ] = idx
+ return typeInfo{idx: idx, derived: true}
+ }
+
+ pw.typsIdx[typ] = w.idx
+ return typeInfo{idx: w.flush(), derived: false}
+}
+
+func (w *writer) structType(typ *types2.Struct) {
+ w.len(typ.NumFields())
+ for i := 0; i < typ.NumFields(); i++ {
+ f := typ.Field(i)
+ w.pos(f)
+ w.selector(f)
+ w.typ(f.Type())
+ w.string(typ.Tag(i))
+ w.bool(f.Embedded())
+ }
+}
+
+func (w *writer) unionType(typ *types2.Union) {
+ w.len(typ.Len())
+ for i := 0; i < typ.Len(); i++ {
+ t := typ.Term(i)
+ w.bool(t.Tilde())
+ w.typ(t.Type())
+ }
+}
+
+func (w *writer) interfaceType(typ *types2.Interface) {
+ w.len(typ.NumExplicitMethods())
+ w.len(typ.NumEmbeddeds())
+
+ for i := 0; i < typ.NumExplicitMethods(); i++ {
+ m := typ.ExplicitMethod(i)
+ sig := m.Type().(*types2.Signature)
+ assert(sig.TParams() == nil)
+
+ w.pos(m)
+ w.selector(m)
+ w.signature(sig)
+ }
+
+ for i := 0; i < typ.NumEmbeddeds(); i++ {
+ w.typ(typ.EmbeddedType(i))
+ }
+}
+
+func (w *writer) signature(sig *types2.Signature) {
+ w.sync(syncSignature)
+ w.params(sig.Params())
+ w.params(sig.Results())
+ w.bool(sig.Variadic())
+}
+
+func (w *writer) params(typ *types2.Tuple) {
+ w.sync(syncParams)
+ w.len(typ.Len())
+ for i := 0; i < typ.Len(); i++ {
+ w.param(typ.At(i))
+ }
+}
+
+func (w *writer) param(param *types2.Var) {
+ w.sync(syncParam)
+ w.pos(param)
+ w.localIdent(param)
+ w.typ(param.Type())
+}
+
+// @@@ Objects
+
+func (w *writer) obj(obj types2.Object, explicits *types2.TypeList) {
+ explicitInfos := make([]typeInfo, explicits.Len())
+ for i := range explicitInfos {
+ explicitInfos[i] = w.p.typIdx(explicits.At(i), w.dict)
+ }
+ info := objInfo{idx: w.p.objIdx(obj), explicits: explicitInfos}
+
+ if _, ok := obj.(*types2.Func); ok && info.anyDerived() {
+ idx := -1
+ for i, prev := range w.dict.funcs {
+ if prev.equals(info) {
+ idx = i
+ }
+ }
+ if idx < 0 {
+ idx = len(w.dict.funcs)
+ w.dict.funcs = append(w.dict.funcs, info)
+ }
+
+ // TODO(mdempsky): Push up into expr; this shouldn't appear
+ // outside of expression context.
+ w.sync(syncObject)
+ w.bool(true)
+ w.len(idx)
+ return
+ }
+
+ // TODO(mdempsky): Push up into typIdx; this shouldn't be needed
+ // except while writing out types.
+ if isDefinedType(obj) && obj.Pkg() == w.p.curpkg {
+ decl, ok := w.p.typDecls[obj.(*types2.TypeName)]
+ assert(ok)
+ if len(decl.implicits) != 0 {
+ w.derived = true
+ }
+ }
+
+ w.sync(syncObject)
+ w.bool(false)
+ w.reloc(relocObj, info.idx)
+
+ w.len(len(info.explicits))
+ for _, info := range info.explicits {
+ w.typInfo(info)
+ }
+}
+
+func (pw *pkgWriter) objIdx(obj types2.Object) int {
+ if idx, ok := pw.globalsIdx[obj]; ok {
+ return idx
+ }
+
+ dict := &writerDict{
+ derivedIdx: make(map[types2.Type]int),
+ }
+
+ if isDefinedType(obj) && obj.Pkg() == pw.curpkg {
+ decl, ok := pw.typDecls[obj.(*types2.TypeName)]
+ assert(ok)
+ dict.implicits = decl.implicits
+ }
+
+ w := pw.newWriter(relocObj, syncObject1)
+ w.ext = pw.newWriter(relocObjExt, syncObject1)
+ wname := pw.newWriter(relocName, syncObject1)
+ wdict := pw.newWriter(relocObjDict, syncObject1)
+
+ pw.globalsIdx[obj] = w.idx // break cycles
+ assert(w.ext.idx == w.idx)
+ assert(wname.idx == w.idx)
+ assert(wdict.idx == w.idx)
+
+ w.dict = dict
+ w.ext.dict = dict
+
+ code := w.doObj(obj)
+ w.flush()
+ w.ext.flush()
+
+ wname.qualifiedIdent(obj)
+ wname.code(code)
+ wname.flush()
+
+ wdict.objDict(obj, w.dict)
+ wdict.flush()
+
+ return w.idx
+}
+
+func (w *writer) doObj(obj types2.Object) codeObj {
+ if obj.Pkg() != w.p.curpkg {
+ return objStub
+ }
+
+ switch obj := obj.(type) {
+ default:
+ w.p.unexpected("object", obj)
+ panic("unreachable")
+
+ case *types2.Const:
+ w.pos(obj)
+ w.value(obj.Type(), obj.Val())
+ return objConst
+
+ case *types2.Func:
+ decl, ok := w.p.funDecls[obj]
+ assert(ok)
+ sig := obj.Type().(*types2.Signature)
+
+ w.pos(obj)
+ w.typeParamNames(sig.TParams())
+ w.signature(sig)
+ w.pos(decl)
+ w.ext.funcExt(obj)
+ return objFunc
+
+ case *types2.TypeName:
+ decl, ok := w.p.typDecls[obj]
+ assert(ok)
+
+ if obj.IsAlias() {
+ w.pos(obj)
+ w.typ(obj.Type())
+ return objAlias
+ }
+
+ named := obj.Type().(*types2.Named)
+ assert(named.TArgs() == nil)
+
+ w.pos(obj)
+ w.typeParamNames(named.TParams())
+ w.ext.typeExt(obj)
+ w.typExpr(decl.Type)
+
+ w.len(named.NumMethods())
+ for i := 0; i < named.NumMethods(); i++ {
+ w.method(named.Method(i))
+ }
+
+ return objType
+
+ case *types2.Var:
+ w.pos(obj)
+ w.typ(obj.Type())
+ w.ext.varExt(obj)
+ return objVar
+ }
+}
+
+// typExpr writes the type represented by the given expression.
+func (w *writer) typExpr(expr syntax.Expr) {
+ tv, ok := w.p.info.Types[expr]
+ assert(ok)
+ assert(tv.IsType())
+ w.typ(tv.Type)
+}
+
+func (w *writer) value(typ types2.Type, val constant.Value) {
+ w.sync(syncValue)
+ w.typ(typ)
+ w.rawValue(val)
+}
+
+// objDict writes the dictionary needed for reading the given object.
+func (w *writer) objDict(obj types2.Object, dict *writerDict) {
+ // TODO(mdempsky): Split objDict into multiple entries? reader.go
+ // doesn't care about the type parameter bounds, and reader2.go
+ // doesn't care about referenced functions.
+
+ w.dict = dict // TODO(mdempsky): This is a bit sketchy.
+
+ w.len(len(dict.implicits))
+
+ tparams := objTypeParams(obj)
+ ntparams := tparams.Len()
+ w.len(ntparams)
+ for i := 0; i < ntparams; i++ {
+ w.typ(tparams.At(i).Constraint())
+ }
+
+ nderived := len(dict.derived)
+ w.len(nderived)
+ for _, typ := range dict.derived {
+ w.reloc(relocType, typ.idx)
+ w.bool(typ.needed)
+ }
+
+ nfuncs := len(dict.funcs)
+ w.len(nfuncs)
+ for _, fn := range dict.funcs {
+ w.reloc(relocObj, fn.idx)
+ w.len(len(fn.explicits))
+ for _, targ := range fn.explicits {
+ w.typInfo(targ)
+ }
+ }
+
+ assert(len(dict.derived) == nderived)
+ assert(len(dict.funcs) == nfuncs)
+}
+
+func (w *writer) typeParamNames(tparams *types2.TParamList) {
+ w.sync(syncTypeParamNames)
+
+ ntparams := tparams.Len()
+ for i := 0; i < ntparams; i++ {
+ tparam := tparams.At(i).Obj()
+ w.pos(tparam)
+ w.localIdent(tparam)
+ }
+}
+
+func (w *writer) method(meth *types2.Func) {
+ decl, ok := w.p.funDecls[meth]
+ assert(ok)
+ sig := meth.Type().(*types2.Signature)
+
+ w.sync(syncMethod)
+ w.pos(meth)
+ w.selector(meth)
+ w.typeParamNames(sig.RParams())
+ w.param(sig.Recv())
+ w.signature(sig)
+
+ w.pos(decl) // XXX: Hack to workaround linker limitations.
+ w.ext.funcExt(meth)
+}
+
+// qualifiedIdent writes out the name of an object declared at package
+// scope. (For now, it's also used to refer to local defined types.)
+func (w *writer) qualifiedIdent(obj types2.Object) {
+ w.sync(syncSym)
+
+ name := obj.Name()
+ if isDefinedType(obj) && obj.Pkg() == w.p.curpkg {
+ decl, ok := w.p.typDecls[obj.(*types2.TypeName)]
+ assert(ok)
+ if decl.gen != 0 {
+ // TODO(mdempsky): Find a better solution than embedding middle
+ // dot in the symbol name; this is terrible.
+ name = fmt.Sprintf("%s·%v", name, decl.gen)
+ }
+ }
+
+ w.pkg(obj.Pkg())
+ w.string(name)
+}
+
+// TODO(mdempsky): We should be able to omit pkg from both localIdent
+// and selector, because they should always be known from context.
+// However, past frustrations with this optimization in iexport make
+// me a little nervous to try it again.
+
+// localIdent writes the name of a locally declared object (i.e.,
+// objects that can only be accessed by name, within the context of a
+// particular function).
+func (w *writer) localIdent(obj types2.Object) {
+ assert(!isGlobal(obj))
+ w.sync(syncLocalIdent)
+ w.pkg(obj.Pkg())
+ w.string(obj.Name())
+}
+
+// selector writes the name of a field or method (i.e., objects that
+// can only be accessed using selector expressions).
+func (w *writer) selector(obj types2.Object) {
+ w.sync(syncSelector)
+ w.pkg(obj.Pkg())
+ w.string(obj.Name())
+}
+
+// @@@ Compiler extensions
+
+func (w *writer) funcExt(obj *types2.Func) {
+ decl, ok := w.p.funDecls[obj]
+ assert(ok)
+
+ // TODO(mdempsky): Extend these pragma validation flags to account
+ // for generics. E.g., linkname, at least, probably doesn't make sense.
+
+ pragma := asPragmaFlag(decl.Pragma)
+ if pragma&ir.Systemstack != 0 && pragma&ir.Nosplit != 0 {
+ w.p.errorf(decl, "go:nosplit and go:systemstack cannot be combined")
+ }
+
+ if decl.Body != nil {
+ if pragma&ir.Noescape != 0 {
+ w.p.errorf(decl, "can only use //go:noescape with external func implementations")
+ }
+ } else {
+ if base.Flag.Complete || decl.Name.Value == "init" {
+ // Linknamed functions are allowed to have no body. Hopefully
+ // the linkname target has a body. See issue 23311.
+ if _, ok := w.p.linknames[obj]; !ok {
+ w.p.errorf(decl, "missing function body")
+ }
+ }
+ }
+
+ sig, block := obj.Type().(*types2.Signature), decl.Body
+ body, closureVars := w.p.bodyIdx(w.p.curpkg, sig, block, w.dict)
+ assert(len(closureVars) == 0)
+
+ w.sync(syncFuncExt)
+ w.pragmaFlag(pragma)
+ w.linkname(obj)
+ w.bool(false) // stub extension
+ w.reloc(relocBody, body)
+ w.sync(syncEOF)
+}
+
+func (w *writer) typeExt(obj *types2.TypeName) {
+ decl, ok := w.p.typDecls[obj]
+ assert(ok)
+
+ w.sync(syncTypeExt)
+
+ w.pragmaFlag(asPragmaFlag(decl.Pragma))
+
+ // No LSym.SymIdx info yet.
+ w.int64(-1)
+ w.int64(-1)
+}
+
+func (w *writer) varExt(obj *types2.Var) {
+ w.sync(syncVarExt)
+ w.linkname(obj)
+}
+
+func (w *writer) linkname(obj types2.Object) {
+ w.sync(syncLinkname)
+ w.int64(-1)
+ w.string(w.p.linknames[obj])
+}
+
+func (w *writer) pragmaFlag(p ir.PragmaFlag) {
+ w.sync(syncPragma)
+ w.int(int(p))
+}
+
+// @@@ Function bodies
+
+func (pw *pkgWriter) bodyIdx(pkg *types2.Package, sig *types2.Signature, block *syntax.BlockStmt, dict *writerDict) (idx int, closureVars []posObj) {
+ w := pw.newWriter(relocBody, syncFuncBody)
+ w.dict = dict
+
+ w.funcargs(sig)
+ if w.bool(block != nil) {
+ w.stmts(block.List)
+ w.pos(block.Rbrace)
+ }
+
+ return w.flush(), w.closureVars
+}
+
+func (w *writer) funcargs(sig *types2.Signature) {
+ do := func(params *types2.Tuple, result bool) {
+ for i := 0; i < params.Len(); i++ {
+ w.funcarg(params.At(i), result)
+ }
+ }
+
+ if recv := sig.Recv(); recv != nil {
+ w.funcarg(recv, false)
+ }
+ do(sig.Params(), false)
+ do(sig.Results(), true)
+}
+
+func (w *writer) funcarg(param *types2.Var, result bool) {
+ if param.Name() != "" || result {
+ w.addLocal(param)
+ }
+}
+
+func (w *writer) addLocal(obj *types2.Var) {
+ w.sync(syncAddLocal)
+ idx := len(w.localsIdx)
+ if enableSync {
+ w.int(idx)
+ }
+ if w.localsIdx == nil {
+ w.localsIdx = make(map[*types2.Var]int)
+ }
+ w.localsIdx[obj] = idx
+}
+
+func (w *writer) useLocal(pos syntax.Pos, obj *types2.Var) {
+ w.sync(syncUseObjLocal)
+
+ if idx, ok := w.localsIdx[obj]; w.bool(ok) {
+ w.len(idx)
+ return
+ }
+
+ idx, ok := w.closureVarsIdx[obj]
+ if !ok {
+ if w.closureVarsIdx == nil {
+ w.closureVarsIdx = make(map[*types2.Var]int)
+ }
+ idx = len(w.closureVars)
+ w.closureVars = append(w.closureVars, posObj{pos, obj})
+ w.closureVarsIdx[obj] = idx
+ }
+ w.len(idx)
+}
+
+func (w *writer) openScope(pos syntax.Pos) {
+ w.sync(syncOpenScope)
+ w.pos(pos)
+}
+
+func (w *writer) closeScope(pos syntax.Pos) {
+ w.sync(syncCloseScope)
+ w.pos(pos)
+ w.closeAnotherScope()
+}
+
+func (w *writer) closeAnotherScope() {
+ w.sync(syncCloseAnotherScope)
+}
+
+// @@@ Statements
+
+func (w *writer) stmt(stmt syntax.Stmt) {
+ var stmts []syntax.Stmt
+ if stmt != nil {
+ stmts = []syntax.Stmt{stmt}
+ }
+ w.stmts(stmts)
+}
+
+func (w *writer) stmts(stmts []syntax.Stmt) {
+ w.sync(syncStmts)
+ for _, stmt := range stmts {
+ w.stmt1(stmt)
+ }
+ w.code(stmtEnd)
+ w.sync(syncStmtsEnd)
+}
+
+func (w *writer) stmt1(stmt syntax.Stmt) {
+ switch stmt := stmt.(type) {
+ default:
+ w.p.unexpected("statement", stmt)
+
+ case nil, *syntax.EmptyStmt:
+ return
+
+ case *syntax.AssignStmt:
+ switch {
+ case stmt.Rhs == nil:
+ w.code(stmtIncDec)
+ w.op(binOps[stmt.Op])
+ w.expr(stmt.Lhs)
+ w.pos(stmt)
+
+ case stmt.Op != 0 && stmt.Op != syntax.Def:
+ w.code(stmtAssignOp)
+ w.op(binOps[stmt.Op])
+ w.expr(stmt.Lhs)
+ w.pos(stmt)
+ w.expr(stmt.Rhs)
+
+ default:
+ w.code(stmtAssign)
+ w.pos(stmt)
+ w.exprList(stmt.Rhs)
+ w.assignList(stmt.Lhs)
+ }
+
+ case *syntax.BlockStmt:
+ w.code(stmtBlock)
+ w.blockStmt(stmt)
+
+ case *syntax.BranchStmt:
+ w.code(stmtBranch)
+ w.pos(stmt)
+ w.op(branchOps[stmt.Tok])
+ w.optLabel(stmt.Label)
+
+ case *syntax.CallStmt:
+ w.code(stmtCall)
+ w.pos(stmt)
+ w.op(callOps[stmt.Tok])
+ w.expr(stmt.Call)
+
+ case *syntax.DeclStmt:
+ for _, decl := range stmt.DeclList {
+ w.declStmt(decl)
+ }
+
+ case *syntax.ExprStmt:
+ w.code(stmtExpr)
+ w.expr(stmt.X)
+
+ case *syntax.ForStmt:
+ w.code(stmtFor)
+ w.forStmt(stmt)
+
+ case *syntax.IfStmt:
+ w.code(stmtIf)
+ w.ifStmt(stmt)
+
+ case *syntax.LabeledStmt:
+ w.code(stmtLabel)
+ w.pos(stmt)
+ w.label(stmt.Label)
+ w.stmt1(stmt.Stmt)
+
+ case *syntax.ReturnStmt:
+ w.code(stmtReturn)
+ w.pos(stmt)
+ w.exprList(stmt.Results)
+
+ case *syntax.SelectStmt:
+ w.code(stmtSelect)
+ w.selectStmt(stmt)
+
+ case *syntax.SendStmt:
+ w.code(stmtSend)
+ w.pos(stmt)
+ w.expr(stmt.Chan)
+ w.expr(stmt.Value)
+
+ case *syntax.SwitchStmt:
+ w.code(stmtSwitch)
+ w.switchStmt(stmt)
+ }
+}
+
+func (w *writer) assignList(expr syntax.Expr) {
+ exprs := unpackListExpr(expr)
+ w.len(len(exprs))
+
+ for _, expr := range exprs {
+ if name, ok := expr.(*syntax.Name); ok && name.Value != "_" {
+ if obj, ok := w.p.info.Defs[name]; ok {
+ obj := obj.(*types2.Var)
+
+ w.bool(true)
+ w.pos(obj)
+ w.localIdent(obj)
+ w.typ(obj.Type())
+
+ // TODO(mdempsky): Minimize locals index size by deferring
+ // this until the variables actually come into scope.
+ w.addLocal(obj)
+ continue
+ }
+ }
+
+ w.bool(false)
+ w.expr(expr)
+ }
+}
+
+func (w *writer) declStmt(decl syntax.Decl) {
+ switch decl := decl.(type) {
+ default:
+ w.p.unexpected("declaration", decl)
+
+ case *syntax.ConstDecl:
+
+ case *syntax.TypeDecl:
+ // Quirk: The legacy inliner doesn't support inlining functions
+ // with type declarations. Unified IR doesn't have any need to
+ // write out type declarations explicitly (they're always looked
+ // up via global index tables instead), so we just write out a
+ // marker so the reader knows to synthesize a fake declaration to
+ // prevent inlining.
+ if quirksMode() {
+ w.code(stmtTypeDeclHack)
+ }
+
+ case *syntax.VarDecl:
+ values := unpackListExpr(decl.Values)
+
+ // Quirk: When N variables are declared with N initialization
+ // values, we need to decompose that into N interleaved
+ // declarations+initializations, because it leads to different
+ // (albeit semantically equivalent) code generation.
+ if quirksMode() && len(decl.NameList) == len(values) {
+ for i, name := range decl.NameList {
+ w.code(stmtAssign)
+ w.pos(decl)
+ w.exprList(values[i])
+ w.assignList(name)
+ }
+ break
+ }
+
+ w.code(stmtAssign)
+ w.pos(decl)
+ w.exprList(decl.Values)
+ w.assignList(namesAsExpr(decl.NameList))
+ }
+}
+
+func (w *writer) blockStmt(stmt *syntax.BlockStmt) {
+ w.sync(syncBlockStmt)
+ w.openScope(stmt.Pos())
+ w.stmts(stmt.List)
+ w.closeScope(stmt.Rbrace)
+}
+
+func (w *writer) forStmt(stmt *syntax.ForStmt) {
+ w.sync(syncForStmt)
+ w.openScope(stmt.Pos())
+
+ if rang, ok := stmt.Init.(*syntax.RangeClause); w.bool(ok) {
+ w.pos(rang)
+ w.expr(rang.X)
+ w.assignList(rang.Lhs)
+ } else {
+ w.pos(stmt)
+ w.stmt(stmt.Init)
+ w.expr(stmt.Cond)
+ w.stmt(stmt.Post)
+ }
+
+ w.blockStmt(stmt.Body)
+ w.closeAnotherScope()
+}
+
+func (w *writer) ifStmt(stmt *syntax.IfStmt) {
+ w.sync(syncIfStmt)
+ w.openScope(stmt.Pos())
+ w.pos(stmt)
+ w.stmt(stmt.Init)
+ w.expr(stmt.Cond)
+ w.blockStmt(stmt.Then)
+ w.stmt(stmt.Else)
+ w.closeAnotherScope()
+}
+
+func (w *writer) selectStmt(stmt *syntax.SelectStmt) {
+ w.sync(syncSelectStmt)
+
+ w.pos(stmt)
+ w.len(len(stmt.Body))
+ for i, clause := range stmt.Body {
+ if i > 0 {
+ w.closeScope(clause.Pos())
+ }
+ w.openScope(clause.Pos())
+
+ w.pos(clause)
+ w.stmt(clause.Comm)
+ w.stmts(clause.Body)
+ }
+ if len(stmt.Body) > 0 {
+ w.closeScope(stmt.Rbrace)
+ }
+}
+
+func (w *writer) switchStmt(stmt *syntax.SwitchStmt) {
+ w.sync(syncSwitchStmt)
+
+ w.openScope(stmt.Pos())
+ w.pos(stmt)
+ w.stmt(stmt.Init)
+
+ if guard, ok := stmt.Tag.(*syntax.TypeSwitchGuard); w.bool(ok) {
+ w.pos(guard)
+ if tag := guard.Lhs; w.bool(tag != nil) {
+ w.pos(tag)
+ w.string(tag.Value)
+ }
+ w.expr(guard.X)
+ } else {
+ w.expr(stmt.Tag)
+ }
+
+ w.len(len(stmt.Body))
+ for i, clause := range stmt.Body {
+ if i > 0 {
+ w.closeScope(clause.Pos())
+ }
+ w.openScope(clause.Pos())
+
+ w.pos(clause)
+ w.exprList(clause.Cases)
+
+ if obj, ok := w.p.info.Implicits[clause]; ok {
+ // TODO(mdempsky): These pos details are quirkish, but also
+ // necessary so the variable's position is correct for DWARF
+ // scope assignment later. It would probably be better for us to
+ // instead just set the variable's DWARF scoping info earlier so
+ // we can give it the correct position information.
+ pos := clause.Pos()
+ if typs := unpackListExpr(clause.Cases); len(typs) != 0 {
+ pos = typeExprEndPos(typs[len(typs)-1])
+ }
+ w.pos(pos)
+
+ obj := obj.(*types2.Var)
+ w.typ(obj.Type())
+ w.addLocal(obj)
+ }
+
+ w.stmts(clause.Body)
+ }
+ if len(stmt.Body) > 0 {
+ w.closeScope(stmt.Rbrace)
+ }
+
+ w.closeScope(stmt.Rbrace)
+}
+
+func (w *writer) label(label *syntax.Name) {
+ w.sync(syncLabel)
+
+ // TODO(mdempsky): Replace label strings with dense indices.
+ w.string(label.Value)
+}
+
+func (w *writer) optLabel(label *syntax.Name) {
+ w.sync(syncOptLabel)
+ if w.bool(label != nil) {
+ w.label(label)
+ }
+}
+
+// @@@ Expressions
+
+func (w *writer) expr(expr syntax.Expr) {
+ expr = unparen(expr) // skip parens; unneeded after typecheck
+
+ obj, targs := lookupObj(w.p.info, expr)
+
+ if tv, ok := w.p.info.Types[expr]; ok {
+ // TODO(mdempsky): Be more judicious about which types are marked as "needed".
+ w.needType(tv.Type)
+
+ if tv.IsType() {
+ w.code(exprType)
+ w.typ(tv.Type)
+ return
+ }
+
+ if tv.Value != nil {
+ pos := expr.Pos()
+ if quirksMode() {
+ if obj != nil {
+ // Quirk: IR (and thus iexport) doesn't track position
+ // information for uses of declared objects.
+ pos = syntax.Pos{}
+ } else if tv.Value.Kind() == constant.String {
+ // Quirk: noder.sum picks a particular position for certain
+ // string concatenations.
+ pos = sumPos(expr)
+ }
+ }
+
+ w.code(exprConst)
+ w.pos(pos)
+ w.value(tv.Type, tv.Value)
+
+ // TODO(mdempsky): These details are only important for backend
+ // diagnostics. Explore writing them out separately.
+ w.op(constExprOp(expr))
+ w.string(syntax.String(expr))
+ return
+ }
+ }
+
+ if obj != nil {
+ if isGlobal(obj) {
+ w.code(exprName)
+ w.obj(obj, targs)
+ return
+ }
+
+ obj := obj.(*types2.Var)
+ assert(targs.Len() == 0)
+
+ w.code(exprLocal)
+ w.useLocal(expr.Pos(), obj)
+ return
+ }
+
+ switch expr := expr.(type) {
+ default:
+ w.p.unexpected("expression", expr)
+
+ case nil: // absent slice index, for condition, or switch tag
+ w.code(exprNone)
+
+ case *syntax.Name:
+ assert(expr.Value == "_")
+ w.code(exprBlank)
+
+ case *syntax.CompositeLit:
+ w.code(exprCompLit)
+ w.compLit(expr)
+
+ case *syntax.FuncLit:
+ w.code(exprFuncLit)
+ w.funcLit(expr)
+
+ case *syntax.SelectorExpr:
+ sel, ok := w.p.info.Selections[expr]
+ assert(ok)
+
+ w.code(exprSelector)
+ w.expr(expr.X)
+ w.pos(expr)
+ w.selector(sel.Obj())
+
+ case *syntax.IndexExpr:
+ tv, ok := w.p.info.Types[expr.Index]
+ assert(ok && tv.IsValue())
+
+ w.code(exprIndex)
+ w.expr(expr.X)
+ w.pos(expr)
+ w.expr(expr.Index)
+
+ case *syntax.SliceExpr:
+ w.code(exprSlice)
+ w.expr(expr.X)
+ w.pos(expr)
+ for _, n := range &expr.Index {
+ w.expr(n)
+ }
+
+ case *syntax.AssertExpr:
+ w.code(exprAssert)
+ w.expr(expr.X)
+ w.pos(expr)
+ w.expr(expr.Type)
+
+ case *syntax.Operation:
+ if expr.Y == nil {
+ w.code(exprUnaryOp)
+ w.op(unOps[expr.Op])
+ w.pos(expr)
+ w.expr(expr.X)
+ break
+ }
+
+ w.code(exprBinaryOp)
+ w.op(binOps[expr.Op])
+ w.expr(expr.X)
+ w.pos(expr)
+ w.expr(expr.Y)
+
+ case *syntax.CallExpr:
+ tv, ok := w.p.info.Types[expr.Fun]
+ assert(ok)
+ if tv.IsType() {
+ assert(len(expr.ArgList) == 1)
+ assert(!expr.HasDots)
+
+ w.code(exprConvert)
+ w.typ(tv.Type)
+ w.pos(expr)
+ w.expr(expr.ArgList[0])
+ break
+ }
+
+ writeFunExpr := func() {
+ if selector, ok := unparen(expr.Fun).(*syntax.SelectorExpr); ok {
+ if sel, ok := w.p.info.Selections[selector]; ok && sel.Kind() == types2.MethodVal {
+ w.expr(selector.X)
+ w.bool(true) // method call
+ w.pos(selector)
+ w.selector(sel.Obj())
+ return
+ }
+ }
+
+ if inf, ok := w.p.info.Inferred[expr]; ok {
+ obj, _ := lookupObj(w.p.info, expr.Fun)
+ assert(obj != nil)
+
+ // As if w.expr(expr.Fun), but using inf.TArgs instead.
+ w.code(exprName)
+ w.obj(obj, inf.TArgs)
+ } else {
+ w.expr(expr.Fun)
+ }
+ w.bool(false) // not a method call (i.e., normal function call)
+ }
+
+ w.code(exprCall)
+ writeFunExpr()
+ w.pos(expr)
+ w.exprs(expr.ArgList)
+ w.bool(expr.HasDots)
+ }
+}
+
+func (w *writer) compLit(lit *syntax.CompositeLit) {
+ tv, ok := w.p.info.Types[lit]
+ assert(ok)
+
+ w.sync(syncCompLit)
+ w.pos(lit)
+ w.typ(tv.Type)
+
+ typ := tv.Type
+ if ptr, ok := typ.Underlying().(*types2.Pointer); ok {
+ typ = ptr.Elem()
+ }
+ str, isStruct := typ.Underlying().(*types2.Struct)
+
+ w.len(len(lit.ElemList))
+ for i, elem := range lit.ElemList {
+ if isStruct {
+ if kv, ok := elem.(*syntax.KeyValueExpr); ok {
+ // use position of expr.Key rather than of elem (which has position of ':')
+ w.pos(kv.Key)
+ w.len(fieldIndex(w.p.info, str, kv.Key.(*syntax.Name)))
+ elem = kv.Value
+ } else {
+ w.pos(elem)
+ w.len(i)
+ }
+ } else {
+ if kv, ok := elem.(*syntax.KeyValueExpr); w.bool(ok) {
+ // use position of expr.Key rather than of elem (which has position of ':')
+ w.pos(kv.Key)
+ w.expr(kv.Key)
+ elem = kv.Value
+ }
+ }
+ w.pos(elem)
+ w.expr(elem)
+ }
+}
+
+func (w *writer) funcLit(expr *syntax.FuncLit) {
+ tv, ok := w.p.info.Types[expr]
+ assert(ok)
+ sig := tv.Type.(*types2.Signature)
+
+ body, closureVars := w.p.bodyIdx(w.p.curpkg, sig, expr.Body, w.dict)
+
+ w.sync(syncFuncLit)
+ w.pos(expr)
+ w.pos(expr.Type) // for QuirksMode
+ w.signature(sig)
+
+ w.len(len(closureVars))
+ for _, cv := range closureVars {
+ w.pos(cv.pos)
+ if quirksMode() {
+ cv.pos = expr.Body.Rbrace
+ }
+ w.useLocal(cv.pos, cv.obj)
+ }
+
+ w.reloc(relocBody, body)
+}
+
+type posObj struct {
+ pos syntax.Pos
+ obj *types2.Var
+}
+
+func (w *writer) exprList(expr syntax.Expr) {
+ w.sync(syncExprList)
+ w.exprs(unpackListExpr(expr))
+}
+
+func (w *writer) exprs(exprs []syntax.Expr) {
+ if len(exprs) == 0 {
+ assert(exprs == nil)
+ }
+
+ w.sync(syncExprs)
+ w.len(len(exprs))
+ for _, expr := range exprs {
+ w.expr(expr)
+ }
+}
+
+func (w *writer) op(op ir.Op) {
+ // TODO(mdempsky): Remove in favor of explicit codes? Would make
+ // export data more stable against internal refactorings, but low
+ // priority at the moment.
+ assert(op != 0)
+ w.sync(syncOp)
+ w.len(int(op))
+}
+
+func (w *writer) needType(typ types2.Type) {
+ // Decompose tuple into component element types.
+ if typ, ok := typ.(*types2.Tuple); ok {
+ for i := 0; i < typ.Len(); i++ {
+ w.needType(typ.At(i).Type())
+ }
+ return
+ }
+
+ if info := w.p.typIdx(typ, w.dict); info.derived {
+ w.dict.derived[info.idx].needed = true
+ }
+}
+
+// @@@ Package initialization
+
+// Caution: This code is still clumsy, because toolstash -cmp is
+// particularly sensitive to it.
+
+type typeDeclGen struct {
+ *syntax.TypeDecl
+ gen int
+
+ // Implicit type parameters in scope at this type declaration.
+ implicits []*types2.TypeName
+}
+
+type fileImports struct {
+ importedEmbed, importedUnsafe bool
+}
+
+type declCollector struct {
+ pw *pkgWriter
+ typegen *int
+ file *fileImports
+ withinFunc bool
+ implicits []*types2.TypeName
+}
+
+func (c *declCollector) withTParams(obj types2.Object) *declCollector {
+ tparams := objTypeParams(obj)
+ n := tparams.Len()
+ if n == 0 {
+ return c
+ }
+
+ copy := *c
+ copy.implicits = copy.implicits[:len(copy.implicits):len(copy.implicits)]
+ for i := 0; i < n; i++ {
+ copy.implicits = append(copy.implicits, tparams.At(i).Obj())
+ }
+ return &copy
+}
+
+func (c *declCollector) Visit(n syntax.Node) syntax.Visitor {
+ pw := c.pw
+
+ switch n := n.(type) {
+ case *syntax.File:
+ pw.checkPragmas(n.Pragma, ir.GoBuildPragma, false)
+
+ case *syntax.ImportDecl:
+ pw.checkPragmas(n.Pragma, 0, false)
+
+ switch pkgNameOf(pw.info, n).Imported().Path() {
+ case "embed":
+ c.file.importedEmbed = true
+ case "unsafe":
+ c.file.importedUnsafe = true
+ }
+
+ case *syntax.ConstDecl:
+ pw.checkPragmas(n.Pragma, 0, false)
+
+ case *syntax.FuncDecl:
+ pw.checkPragmas(n.Pragma, funcPragmas, false)
+
+ obj := pw.info.Defs[n.Name].(*types2.Func)
+ pw.funDecls[obj] = n
+
+ return c.withTParams(obj)
+
+ case *syntax.TypeDecl:
+ obj := pw.info.Defs[n.Name].(*types2.TypeName)
+ d := typeDeclGen{TypeDecl: n, implicits: c.implicits}
+
+ if n.Alias {
+ pw.checkPragmas(n.Pragma, 0, false)
+ } else {
+ pw.checkPragmas(n.Pragma, typePragmas, false)
+
+ // Assign a unique ID to function-scoped defined types.
+ if c.withinFunc {
+ *c.typegen++
+ d.gen = *c.typegen
+ }
+ }
+
+ pw.typDecls[obj] = d
+
+ // TODO(mdempsky): Omit? Not strictly necessary; only matters for
+ // type declarations within function literals within parameterized
+ // type declarations, but in types2 the function literals will be
+ // constant folded away.
+ return c.withTParams(obj)
+
+ case *syntax.VarDecl:
+ pw.checkPragmas(n.Pragma, 0, true)
+
+ if p, ok := n.Pragma.(*pragmas); ok && len(p.Embeds) > 0 {
+ if err := checkEmbed(n, c.file.importedEmbed, c.withinFunc); err != nil {
+ pw.errorf(p.Embeds[0].Pos, "%s", err)
+ }
+ }
+
+ // Workaround for #46208. For variable declarations that
+ // declare multiple variables and have an explicit type
+ // expression, the type expression is evaluated multiple
+ // times. This affects toolstash -cmp, because iexport is
+ // sensitive to *types.Type pointer identity.
+ if quirksMode() && n.Type != nil {
+ tv, ok := pw.info.Types[n.Type]
+ assert(ok)
+ assert(tv.IsType())
+ for _, name := range n.NameList {
+ obj := pw.info.Defs[name].(*types2.Var)
+ pw.dups.add(obj.Type(), tv.Type)
+ }
+ }
+
+ case *syntax.BlockStmt:
+ if !c.withinFunc {
+ copy := *c
+ copy.withinFunc = true
+ return &copy
+ }
+ }
+
+ return c
+}
+
+func (pw *pkgWriter) collectDecls(noders []*noder) {
+ var typegen int
+ for _, p := range noders {
+ var file fileImports
+
+ syntax.Walk(p.file, &declCollector{
+ pw: pw,
+ typegen: &typegen,
+ file: &file,
+ })
+
+ pw.cgoPragmas = append(pw.cgoPragmas, p.pragcgobuf...)
+
+ for _, l := range p.linknames {
+ if !file.importedUnsafe {
+ pw.errorf(l.pos, "//go:linkname only allowed in Go files that import \"unsafe\"")
+ continue
+ }
+
+ switch obj := pw.curpkg.Scope().Lookup(l.local).(type) {
+ case *types2.Func, *types2.Var:
+ if _, ok := pw.linknames[obj]; !ok {
+ pw.linknames[obj] = l.remote
+ } else {
+ pw.errorf(l.pos, "duplicate //go:linkname for %s", l.local)
+ }
+
+ default:
+ // TODO(mdempsky): Enable after #42938 is fixed.
+ if false {
+ pw.errorf(l.pos, "//go:linkname must refer to declared function or variable")
+ }
+ }
+ }
+ }
+}
+
+func (pw *pkgWriter) checkPragmas(p syntax.Pragma, allowed ir.PragmaFlag, embedOK bool) {
+ if p == nil {
+ return
+ }
+ pragma := p.(*pragmas)
+
+ for _, pos := range pragma.Pos {
+ if pos.Flag&^allowed != 0 {
+ pw.errorf(pos.Pos, "misplaced compiler directive")
+ }
+ }
+
+ if !embedOK {
+ for _, e := range pragma.Embeds {
+ pw.errorf(e.Pos, "misplaced go:embed directive")
+ }
+ }
+}
+
+func (w *writer) pkgInit(noders []*noder) {
+ if quirksMode() {
+ posBases := posBasesOf(noders)
+ w.len(len(posBases))
+ for _, posBase := range posBases {
+ w.posBase(posBase)
+ }
+
+ objs := importedObjsOf(w.p.curpkg, w.p.info, noders)
+ w.len(len(objs))
+ for _, obj := range objs {
+ w.qualifiedIdent(obj)
+ }
+ }
+
+ w.len(len(w.p.cgoPragmas))
+ for _, cgoPragma := range w.p.cgoPragmas {
+ w.strings(cgoPragma)
+ }
+
+ w.sync(syncDecls)
+ for _, p := range noders {
+ for _, decl := range p.file.DeclList {
+ w.pkgDecl(decl)
+ }
+ }
+ w.code(declEnd)
+
+ w.sync(syncEOF)
+}
+
+func (w *writer) pkgDecl(decl syntax.Decl) {
+ switch decl := decl.(type) {
+ default:
+ w.p.unexpected("declaration", decl)
+
+ case *syntax.ImportDecl:
+
+ case *syntax.ConstDecl:
+ w.code(declOther)
+ w.pkgObjs(decl.NameList...)
+
+ case *syntax.FuncDecl:
+ if decl.Name.Value == "_" {
+ break // skip blank functions
+ }
+
+ obj := w.p.info.Defs[decl.Name].(*types2.Func)
+ sig := obj.Type().(*types2.Signature)
+
+ if sig.RParams() != nil || sig.TParams() != nil {
+ break // skip generic functions
+ }
+
+ if recv := sig.Recv(); recv != nil {
+ w.code(declMethod)
+ w.typ(recvBase(recv))
+ w.selector(obj)
+ break
+ }
+
+ w.code(declFunc)
+ w.pkgObjs(decl.Name)
+
+ case *syntax.TypeDecl:
+ if len(decl.TParamList) != 0 {
+ break // skip generic type decls
+ }
+
+ if decl.Name.Value == "_" {
+ break // skip blank type decls
+ }
+
+ name := w.p.info.Defs[decl.Name].(*types2.TypeName)
+ // Skip type declarations for interfaces that are only usable as
+ // type parameter bounds.
+ if iface, ok := name.Type().Underlying().(*types2.Interface); ok && iface.IsConstraint() {
+ break
+ }
+
+ // Skip aliases to uninstantiated generic types.
+ // TODO(mdempsky): Revisit after #46477 is resolved.
+ if name.IsAlias() {
+ named, ok := name.Type().(*types2.Named)
+ if ok && named.TParams().Len() != 0 && named.TArgs().Len() == 0 {
+ break
+ }
+ }
+
+ w.code(declOther)
+ w.pkgObjs(decl.Name)
+
+ case *syntax.VarDecl:
+ w.code(declVar)
+ w.pos(decl)
+ w.pkgObjs(decl.NameList...)
+ w.exprList(decl.Values)
+
+ var embeds []pragmaEmbed
+ if p, ok := decl.Pragma.(*pragmas); ok {
+ embeds = p.Embeds
+ }
+ w.len(len(embeds))
+ for _, embed := range embeds {
+ w.pos(embed.Pos)
+ w.strings(embed.Patterns)
+ }
+ }
+}
+
+func (w *writer) pkgObjs(names ...*syntax.Name) {
+ w.sync(syncDeclNames)
+ w.len(len(names))
+
+ for _, name := range names {
+ obj, ok := w.p.info.Defs[name]
+ assert(ok)
+
+ w.sync(syncDeclName)
+ w.obj(obj, nil)
+ }
+}
+
+// @@@ Helpers
+
+// isDefinedType reports whether obj is a defined type.
+func isDefinedType(obj types2.Object) bool {
+ if obj, ok := obj.(*types2.TypeName); ok {
+ return !obj.IsAlias()
+ }
+ return false
+}
+
+// isGlobal reports whether obj was declared at package scope.
+//
+// Caveat: blank objects are not declared.
+func isGlobal(obj types2.Object) bool {
+ return obj.Parent() == obj.Pkg().Scope()
+}
+
+// lookupObj returns the object that expr refers to, if any. If expr
+// is an explicit instantiation of a generic object, then the type
+// arguments are returned as well.
+func lookupObj(info *types2.Info, expr syntax.Expr) (obj types2.Object, targs *types2.TypeList) {
+ if index, ok := expr.(*syntax.IndexExpr); ok {
+ if inf, ok := info.Inferred[index]; ok {
+ targs = inf.TArgs
+ } else {
+ args := unpackListExpr(index.Index)
+
+ if len(args) == 1 {
+ tv, ok := info.Types[args[0]]
+ assert(ok)
+ if tv.IsValue() {
+ return // normal index expression
+ }
+ }
+
+ list := make([]types2.Type, len(args))
+ for i, arg := range args {
+ tv, ok := info.Types[arg]
+ assert(ok)
+ assert(tv.IsType())
+ list[i] = tv.Type
+ }
+ targs = types2.NewTypeList(list)
+ }
+
+ expr = index.X
+ }
+
+ // Strip package qualifier, if present.
+ if sel, ok := expr.(*syntax.SelectorExpr); ok {
+ if !isPkgQual(info, sel) {
+ return // normal selector expression
+ }
+ expr = sel.Sel
+ }
+
+ if name, ok := expr.(*syntax.Name); ok {
+ obj, _ = info.Uses[name]
+ }
+ return
+}
+
+// isPkgQual reports whether the given selector expression is a
+// package-qualified identifier.
+func isPkgQual(info *types2.Info, sel *syntax.SelectorExpr) bool {
+ if name, ok := sel.X.(*syntax.Name); ok {
+ _, isPkgName := info.Uses[name].(*types2.PkgName)
+ return isPkgName
+ }
+ return false
+}
+
+// recvBase returns the base type for the given receiver parameter.
+func recvBase(recv *types2.Var) *types2.Named {
+ typ := recv.Type()
+ if ptr, ok := typ.(*types2.Pointer); ok {
+ typ = ptr.Elem()
+ }
+ return typ.(*types2.Named)
+}
+
+// namesAsExpr returns a list of names as a syntax.Expr.
+func namesAsExpr(names []*syntax.Name) syntax.Expr {
+ if len(names) == 1 {
+ return names[0]
+ }
+
+ exprs := make([]syntax.Expr, len(names))
+ for i, name := range names {
+ exprs[i] = name
+ }
+ return &syntax.ListExpr{ElemList: exprs}
+}
+
+// fieldIndex returns the index of the struct field named by key.
+func fieldIndex(info *types2.Info, str *types2.Struct, key *syntax.Name) int {
+ field := info.Uses[key].(*types2.Var)
+
+ for i := 0; i < str.NumFields(); i++ {
+ if str.Field(i) == field {
+ return i
+ }
+ }
+
+ panic(fmt.Sprintf("%s: %v is not a field of %v", key.Pos(), field, str))
+}
+
+// objTypeParams returns the type parameters on the given object.
+func objTypeParams(obj types2.Object) *types2.TParamList {
+ switch obj := obj.(type) {
+ case *types2.Func:
+ sig := obj.Type().(*types2.Signature)
+ if sig.Recv() != nil {
+ return sig.RParams()
+ }
+ return sig.TParams()
+ case *types2.TypeName:
+ if !obj.IsAlias() {
+ return obj.Type().(*types2.Named).TParams()
+ }
+ }
+ return nil
+}
+
+func asPragmaFlag(p syntax.Pragma) ir.PragmaFlag {
+ if p == nil {
+ return 0
+ }
+ return p.(*pragmas).Flag
+}
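One piece of bookkeeping in writer.go worth spelling out is how useLocal indexes variable references: a variable declared in the current function gets the index assigned by addLocal, while a variable captured from an enclosing function is appended to closureVars on first use and reuses that index afterwards. A rough standalone sketch of that logic, with string names standing in for *types2.Var:

package main

import "fmt"

// captureState mimics the writer's localsIdx/closureVars bookkeeping.
type captureState struct {
	locals      map[string]int // declared in this function (addLocal)
	closureVars []string       // captured from an enclosing function
	closureIdx  map[string]int
}

// use returns the index recorded for a variable reference and whether the
// variable is local, mirroring (*writer).useLocal.
func (c *captureState) use(name string) (idx int, isLocal bool) {
	if i, ok := c.locals[name]; ok {
		return i, true
	}
	i, ok := c.closureIdx[name]
	if !ok {
		if c.closureIdx == nil {
			c.closureIdx = make(map[string]int)
		}
		i = len(c.closureVars)
		c.closureVars = append(c.closureVars, name)
		c.closureIdx[name] = i
	}
	return i, false
}

func main() {
	c := &captureState{locals: map[string]int{"x": 0}}
	fmt.Println(c.use("x")) // 0 true
	fmt.Println(c.use("y")) // 0 false (first capture)
	fmt.Println(c.use("y")) // 0 false (index reused)
	fmt.Println(c.use("z")) // 1 false
}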
diff --git a/src/cmd/compile/internal/pkginit/initorder.go b/src/cmd/compile/internal/pkginit/initorder.go
index 97d69629fb..0aad63a69f 100644
--- a/src/cmd/compile/internal/pkginit/initorder.go
+++ b/src/cmd/compile/internal/pkginit/initorder.go
@@ -304,7 +304,7 @@ func (d *initDeps) visit(n ir.Node) {
n := n.(*ir.ClosureExpr)
d.inspectList(n.Func.Body)
- case ir.ODOTMETH, ir.OCALLPART, ir.OMETHEXPR:
+ case ir.ODOTMETH, ir.OMETHVALUE, ir.OMETHEXPR:
d.foundDep(ir.MethodExprName(n))
}
}
diff --git a/src/cmd/compile/internal/ppc64/galign.go b/src/cmd/compile/internal/ppc64/galign.go
index 590290fa37..bff3e38f42 100644
--- a/src/cmd/compile/internal/ppc64/galign.go
+++ b/src/cmd/compile/internal/ppc64/galign.go
@@ -20,7 +20,6 @@ func Init(arch *ssagen.ArchInfo) {
arch.ZeroRange = zerorange
arch.Ginsnop = ginsnop
- arch.Ginsnopdefer = ginsnopdefer
arch.SSAMarkMoves = ssaMarkMoves
arch.SSAGenValue = ssaGenValue
diff --git a/src/cmd/compile/internal/ppc64/ggen.go b/src/cmd/compile/internal/ppc64/ggen.go
index c76962cfb8..3ae6422bf9 100644
--- a/src/cmd/compile/internal/ppc64/ggen.go
+++ b/src/cmd/compile/internal/ppc64/ggen.go
@@ -53,30 +53,3 @@ func ginsnop(pp *objw.Progs) *obj.Prog {
p.To.Reg = ppc64.REG_R0
return p
}
-
-func ginsnopdefer(pp *objw.Progs) *obj.Prog {
- // On PPC64 two nops are required in the defer case.
- //
- // (see gc/cgen.go, gc/plive.go -- copy of comment below)
- //
- // On ppc64, when compiling Go into position
- // independent code on ppc64le we insert an
- // instruction to reload the TOC pointer from the
- // stack as well. See the long comment near
- // jmpdefer in runtime/asm_ppc64.s for why.
- // If the MOVD is not needed, insert a hardware NOP
- // so that the same number of instructions are used
- // on ppc64 in both shared and non-shared modes.
-
- ginsnop(pp)
- if base.Ctxt.Flag_shared {
- p := pp.Prog(ppc64.AMOVD)
- p.From.Type = obj.TYPE_MEM
- p.From.Offset = 24
- p.From.Reg = ppc64.REGSP
- p.To.Type = obj.TYPE_REG
- p.To.Reg = ppc64.REG_R2
- return p
- }
- return ginsnop(pp)
-}
diff --git a/src/cmd/compile/internal/reflectdata/alg.go b/src/cmd/compile/internal/reflectdata/alg.go
index 0707e0b61c..36ad389647 100644
--- a/src/cmd/compile/internal/reflectdata/alg.go
+++ b/src/cmd/compile/internal/reflectdata/alg.go
@@ -679,8 +679,7 @@ func EqString(s, t ir.Node) (eqlen *ir.BinaryExpr, eqmem *ir.CallExpr) {
fn := typecheck.LookupRuntime("memequal")
fn = typecheck.SubstArgTypes(fn, types.Types[types.TUINT8], types.Types[types.TUINT8])
- call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, []ir.Node{sptr, tptr, ir.Copy(slen)})
- typecheck.Call(call)
+ call := typecheck.Call(base.Pos, fn, []ir.Node{sptr, tptr, ir.Copy(slen)}, false).(*ir.CallExpr)
cmp := ir.NewBinaryExpr(base.Pos, ir.OEQ, slen, tlen)
cmp = typecheck.Expr(cmp).(*ir.BinaryExpr)
@@ -716,8 +715,7 @@ func EqInterface(s, t ir.Node) (eqtab *ir.BinaryExpr, eqdata *ir.CallExpr) {
sdata.SetTypecheck(1)
tdata.SetTypecheck(1)
- call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, []ir.Node{stab, sdata, tdata})
- typecheck.Call(call)
+ call := typecheck.Call(base.Pos, fn, []ir.Node{stab, sdata, tdata}, false).(*ir.CallExpr)
cmp := ir.NewBinaryExpr(base.Pos, ir.OEQ, stab, ttab)
cmp = typecheck.Expr(cmp).(*ir.BinaryExpr)
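For context, EqString arranges the usual two-part string comparison: an equal-length check plus a runtime.memequal call over the backing bytes, which the caller combines. Written out in plain Go, the generated comparison has roughly this shape; the function below is only an illustration, not code from this change.

package main

import "fmt"

// eqString spells out the comparison EqString emits: a length check
// followed by a byte-wise comparison of the string data (done by
// runtime.memequal in the generated code).
func eqString(s, t string) bool {
	if len(s) != len(t) {
		return false
	}
	for i := 0; i < len(s); i++ {
		if s[i] != t[i] {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(eqString("go", "go"), eqString("go", "Go")) // true false
}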
diff --git a/src/cmd/compile/internal/reflectdata/reflect.go b/src/cmd/compile/internal/reflectdata/reflect.go
index eb9a8a6c9b..9b9efe04a2 100644
--- a/src/cmd/compile/internal/reflectdata/reflect.go
+++ b/src/cmd/compile/internal/reflectdata/reflect.go
@@ -28,35 +28,27 @@ import (
"cmd/internal/src"
)
-type itabEntry struct {
- t, itype *types.Type
- lsym *obj.LSym // symbol of the itab itself
-
- // symbols of each method in
- // the itab, sorted by byte offset;
- // filled in by CompileITabs
- entries []*obj.LSym
-}
-
type ptabEntry struct {
s *types.Sym
t *types.Type
}
-func CountTabs() (numPTabs, numITabs int) {
- return len(ptabs), len(itabs)
+func CountPTabs() int {
+ return len(ptabs)
}
// runtime interface and reflection data structures
var (
- signatmu sync.Mutex // protects signatset and signatslice
- signatset = make(map[*types.Type]struct{})
- signatslice []*types.Type
+ // protects signatset and signatslice
+ signatmu sync.Mutex
+ // Tracking which types need a runtime type descriptor
+ signatset = make(map[*types.Type]struct{})
+ // Queue of types waiting to have their runtime type descriptors generated
+ signatslice []typeAndStr
gcsymmu sync.Mutex // protects gcsymset and gcsymslice
gcsymset = make(map[*types.Type]struct{})
- itabs []itabEntry
ptabs []*ir.Name
)
@@ -313,6 +305,10 @@ func MapIterType(t *types.Type) *types.Type {
// methods returns the methods of the non-interface type t, sorted by name.
// Generates stub functions as needed.
func methods(t *types.Type) []*typeSig {
+ if t.HasShape() {
+ // Shape types have no methods.
+ return nil
+ }
// method type
mt := types.ReceiverBaseType(t)
@@ -321,13 +317,6 @@ func methods(t *types.Type) []*typeSig {
}
typecheck.CalcMethods(mt)
- // type stored in interface word
- it := t
-
- if !types.IsDirectIface(it) {
- it = types.NewPtr(t)
- }
-
// make list of methods for t,
// generating code if necessary.
var ms []*typeSig
@@ -355,8 +344,8 @@ func methods(t *types.Type) []*typeSig {
sig := &typeSig{
name: f.Sym,
- isym: methodWrapper(it, f),
- tsym: methodWrapper(t, f),
+ isym: methodWrapper(t, f, true),
+ tsym: methodWrapper(t, f, false),
type_: typecheck.NewMethodType(f.Type, t),
mtype: typecheck.NewMethodType(f.Type, nil),
}
@@ -394,7 +383,7 @@ func imethods(t *types.Type) []*typeSig {
// IfaceType.Method is not in the reflect data.
// Generate the method body, so that compiled
// code can refer to it.
- methodWrapper(t, f)
+ methodWrapper(t, f, false)
}
return methods
@@ -735,7 +724,7 @@ func dcommontype(lsym *obj.LSym, t *types.Type) int {
}
exported := false
- p := t.LongString()
+ p := t.NameString()
// If we're writing out type T,
// we are very likely to write out type *T as well.
// Use the string "*T"[1:] for "T", so that the two
@@ -799,11 +788,11 @@ func dcommontype(lsym *obj.LSym, t *types.Type) int {
// TrackSym returns the symbol for tracking use of field/method f, assumed
// to be a member of struct/interface type t.
func TrackSym(t *types.Type, f *types.Field) *obj.LSym {
- return base.PkgLinksym("go.track", t.ShortString()+"."+f.Sym.Name, obj.ABI0)
+ return base.PkgLinksym("go.track", t.LinkString()+"."+f.Sym.Name, obj.ABI0)
}
func TypeSymPrefix(prefix string, t *types.Type) *types.Sym {
- p := prefix + "." + t.ShortString()
+ p := prefix + "." + t.LinkString()
s := types.TypeSymLookup(p)
// This function is for looking up type-related generated functions
@@ -848,16 +837,28 @@ func TypePtr(t *types.Type) *ir.AddrExpr {
return typecheck.Expr(typecheck.NodAddr(n)).(*ir.AddrExpr)
}
-func ITabAddr(t, itype *types.Type) *ir.AddrExpr {
- if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() || !itype.IsInterface() || itype.IsEmptyInterface() {
- base.Fatalf("ITabAddr(%v, %v)", t, itype)
- }
- s, existed := ir.Pkgs.Itab.LookupOK(t.ShortString() + "," + itype.ShortString())
+// ITabLsym returns the LSym representing the itab for concrete type typ
+// implementing interface iface.
+func ITabLsym(typ, iface *types.Type) *obj.LSym {
+ s, existed := ir.Pkgs.Itab.LookupOK(typ.LinkString() + "," + iface.LinkString())
+ lsym := s.Linksym()
+
if !existed {
- itabs = append(itabs, itabEntry{t: t, itype: itype, lsym: s.Linksym()})
+ writeITab(lsym, typ, iface)
}
+ return lsym
+}
+// ITabAddr returns an expression representing a pointer to the itab
+// for concrete type typ implementing interface iface.
+func ITabAddr(typ, iface *types.Type) *ir.AddrExpr {
+ s, existed := ir.Pkgs.Itab.LookupOK(typ.LinkString() + "," + iface.LinkString())
lsym := s.Linksym()
+
+ if !existed {
+ writeITab(lsym, typ, iface)
+ }
+
n := ir.NewLinksymExpr(base.Pos, lsym, types.Types[types.TUINT8])
return typecheck.Expr(typecheck.NodAddr(n)).(*ir.AddrExpr)
}
@@ -945,25 +946,27 @@ func writeType(t *types.Type) *obj.LSym {
if t.IsPtr() && t.Sym() == nil && t.Elem().Sym() != nil {
tbase = t.Elem()
}
+ if tbase.Kind() == types.TFORW {
+ base.Fatalf("unresolved defined type: %v", tbase)
+ }
+
dupok := 0
- if tbase.Sym() == nil {
+ if tbase.Sym() == nil || tbase.HasShape() { // TODO(mdempsky): Probably need DUPOK for instantiated types too.
dupok = obj.DUPOK
}
- if base.Ctxt.Pkgpath != "runtime" || (tbase != types.Types[tbase.Kind()] && tbase != types.ByteType && tbase != types.RuneType && tbase != types.ErrorType) { // int, float, etc
- // named types from other files are defined only by those files
- if tbase.Sym() != nil && tbase.Sym().Pkg != types.LocalPkg {
- if i := typecheck.BaseTypeIndex(t); i >= 0 {
- lsym.Pkg = tbase.Sym().Pkg.Prefix
- lsym.SymIdx = int32(i)
- lsym.Set(obj.AttrIndexed, true)
- }
- return lsym
- }
- // TODO(mdempsky): Investigate whether this can happen.
- if tbase.Kind() == types.TFORW {
- return lsym
+ if !NeedEmit(tbase) {
+ if i := typecheck.BaseTypeIndex(t); i >= 0 {
+ lsym.Pkg = tbase.Sym().Pkg.Prefix
+ lsym.SymIdx = int32(i)
+ lsym.Set(obj.AttrIndexed, true)
}
+
+ // TODO(mdempsky): Investigate whether this still happens.
+ // If we know we don't need to emit code for a type,
+ // we should have a link-symbol index for it.
+ // See also TODO in NeedEmit.
+ return lsym
}
ot := 0
@@ -1226,108 +1229,25 @@ func InterfaceMethodOffset(ityp *types.Type, i int64) int64 {
return int64(commonSize()+4*types.PtrSize+uncommonSize(ityp)) + i*8
}
-// for each itabEntry, gather the methods on
-// the concrete type that implement the interface
-func CompileITabs() {
- for i := range itabs {
- tab := &itabs[i]
- methods := genfun(tab.t, tab.itype)
- if len(methods) == 0 {
- continue
- }
- tab.entries = methods
- }
-}
-
-// for the given concrete type and interface
-// type, return the (sorted) set of methods
-// on the concrete type that implement the interface
-func genfun(t, it *types.Type) []*obj.LSym {
- if t == nil || it == nil {
- return nil
- }
- sigs := imethods(it)
- methods := methods(t)
- out := make([]*obj.LSym, 0, len(sigs))
- // TODO(mdempsky): Short circuit before calling methods(t)?
- // See discussion on CL 105039.
- if len(sigs) == 0 {
- return nil
- }
-
- // both sigs and methods are sorted by name,
- // so we can find the intersect in a single pass
- for _, m := range methods {
- if m.name == sigs[0].name {
- out = append(out, m.isym)
- sigs = sigs[1:]
- if len(sigs) == 0 {
- break
- }
- }
- }
-
- if len(sigs) != 0 {
- base.Fatalf("incomplete itab")
- }
-
- return out
-}
-
-// ITabSym uses the information gathered in
-// CompileITabs to de-virtualize interface methods.
-// Since this is called by the SSA backend, it shouldn't
-// generate additional Nodes, Syms, etc.
-func ITabSym(it *obj.LSym, offset int64) *obj.LSym {
- var syms []*obj.LSym
- if it == nil {
- return nil
- }
-
- for i := range itabs {
- e := &itabs[i]
- if e.lsym == it {
- syms = e.entries
- break
- }
- }
- if syms == nil {
- return nil
- }
-
- // keep this arithmetic in sync with *itab layout
- methodnum := int((offset - 2*int64(types.PtrSize) - 8) / int64(types.PtrSize))
- if methodnum >= len(syms) {
- return nil
- }
- return syms[methodnum]
-}
-
// NeedRuntimeType ensures that a runtime type descriptor is emitted for t.
func NeedRuntimeType(t *types.Type) {
if t.HasTParam() {
- // Generic types don't have a runtime type descriptor (but will
- // have a dictionary)
+ // Generic types don't really exist at run-time and have no runtime
+ // type descriptor. But we do write out shape types.
return
}
if _, ok := signatset[t]; !ok {
signatset[t] = struct{}{}
- signatslice = append(signatslice, t)
+ signatslice = append(signatslice, typeAndStr{t: t, short: types.TypeSymName(t), regular: t.String()})
}
}
func WriteRuntimeTypes() {
- // Process signatset. Use a loop, as writeType adds
- // entries to signatset while it is being processed.
- signats := make([]typeAndStr, len(signatslice))
+ // Process signatslice. Use a loop, as writeType adds
+ // entries to signatslice while it is being processed.
for len(signatslice) > 0 {
- signats = signats[:0]
- // Transfer entries to a slice and sort, for reproducible builds.
- for _, t := range signatslice {
- signats = append(signats, typeAndStr{t: t, short: types.TypeSymName(t), regular: t.String()})
- delete(signatset, t)
- }
- signatslice = signatslice[:0]
+ signats := signatslice
+ // Sort for reproducible builds.
sort.Sort(typesByString(signats))
for _, ts := range signats {
t := ts.t
@@ -1336,6 +1256,7 @@ func WriteRuntimeTypes() {
writeType(types.NewPtr(t))
}
}
+ signatslice = signatslice[len(signats):]
}
// Emit GC data symbols.
@@ -1349,29 +1270,66 @@ func WriteRuntimeTypes() {
}
}
-func WriteTabs() {
- // process itabs
- for _, i := range itabs {
- // dump empty itab symbol into i.sym
- // type itab struct {
- // inter *interfacetype
- // _type *_type
- // hash uint32
- // _ [4]byte
- // fun [1]uintptr // variable sized
- // }
- o := objw.SymPtr(i.lsym, 0, writeType(i.itype), 0)
- o = objw.SymPtr(i.lsym, o, writeType(i.t), 0)
- o = objw.Uint32(i.lsym, o, types.TypeHash(i.t)) // copy of type hash
- o += 4 // skip unused field
- for _, fn := range genfun(i.t, i.itype) {
- o = objw.SymPtrWeak(i.lsym, o, fn, 0) // method pointer for each method
- }
- // Nothing writes static itabs, so they are read only.
- objw.Global(i.lsym, int32(o), int16(obj.DUPOK|obj.RODATA))
- i.lsym.Set(obj.AttrContentAddressable, true)
+// writeITab writes the itab for concrete type typ implementing
+// interface iface.
+func writeITab(lsym *obj.LSym, typ, iface *types.Type) {
+ // TODO(mdempsky): Fix methodWrapper, geneq, and genhash (and maybe
+ // others) to stop clobbering these.
+ oldpos, oldfn := base.Pos, ir.CurFunc
+ defer func() { base.Pos, ir.CurFunc = oldpos, oldfn }()
+
+ if typ == nil || (typ.IsPtr() && typ.Elem() == nil) || typ.IsUntyped() || iface == nil || !iface.IsInterface() || iface.IsEmptyInterface() {
+ base.Fatalf("writeITab(%v, %v)", typ, iface)
}
+ sigs := iface.AllMethods().Slice()
+ entries := make([]*obj.LSym, 0, len(sigs))
+
+ // both sigs and methods are sorted by name,
+ // so we can find the intersection in a single pass
+ for _, m := range methods(typ) {
+ if m.name == sigs[0].Sym {
+ entries = append(entries, m.isym)
+ if m.isym == nil {
+ panic("NO ISYM")
+ }
+ sigs = sigs[1:]
+ if len(sigs) == 0 {
+ break
+ }
+ }
+ if sigs[0].Sym.Name == "==" {
+ sigs = sigs[1:]
+ if len(sigs) == 0 {
+ break
+ }
+ }
+ }
+ if len(sigs) != 0 {
+ base.Fatalf("incomplete itab")
+ }
+
+ // dump empty itab symbol into i.sym
+ // type itab struct {
+ // inter *interfacetype
+ // _type *_type
+ // hash uint32
+ // _ [4]byte
+ // fun [1]uintptr // variable sized
+ // }
+ o := objw.SymPtr(lsym, 0, writeType(iface), 0)
+ o = objw.SymPtr(lsym, o, writeType(typ), 0)
+ o = objw.Uint32(lsym, o, types.TypeHash(typ)) // copy of type hash
+ o += 4 // skip unused field
+ for _, fn := range entries {
+ o = objw.SymPtrWeak(lsym, o, fn, 0) // method pointer for each method
+ }
+ // Nothing writes static itabs, so they are read only.
+ objw.Global(lsym, int32(o), int16(obj.DUPOK|obj.RODATA))
+ lsym.Set(obj.AttrContentAddressable, true)
+}
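
For orientation, a minimal sketch of the layout writeITab emits (illustrative Go, not part of the patch; field names mirror the commented struct above and offsets assume a 64-bit target):

package main

import (
	"fmt"
	"unsafe"
)

// itab mirrors the symbol layout written above: two pointers, a hash,
// 4 bytes of padding (the "skip unused field" step), then the method table.
type itab struct {
	inter unsafe.Pointer // *interfacetype
	_type unsafe.Pointer // *_type of the concrete type
	hash  uint32         // copy of the type hash
	_     [4]byte
	fun   [1]uintptr // one weak method pointer per interface method
}

func main() {
	// The method table starts at 2*PtrSize + 8 = 24 bytes, matching the
	// objw.SymPtr/Uint32 offsets emitted by writeITab.
	fmt.Println(unsafe.Offsetof(itab{}.fun)) // 24
}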
+
+func WriteTabs() {
// process ptabs
if types.LocalPkg.Name == "main" && len(ptabs) > 0 {
ot := 0
@@ -1453,7 +1411,7 @@ func WriteBasicTypes() {
type typeAndStr struct {
t *types.Type
- short string
+ short string // "short" here means NameString
regular string
}
@@ -1466,8 +1424,13 @@ func (a typesByString) Less(i, j int) bool {
}
// When the only difference between the types is whether
// they refer to byte or uint8, such as **byte vs **uint8,
- // the types' ShortStrings can be identical.
+ // the types' NameStrings can be identical.
// To preserve deterministic sort ordering, sort these by String().
+ //
+ // TODO(mdempsky): This all seems suspect. Using LinkString would
+ // avoid naming collisions, and there shouldn't be a reason to care
+ // about "byte" vs "uint8": they share the same runtime type
+ // descriptor anyway.
if a[i].regular != a[j].regular {
return a[i].regular < a[j].regular
}
@@ -1741,6 +1704,49 @@ func CollectPTabs() {
}
}
+// NeedEmit reports whether typ is a type that we need to emit code
+// for (e.g., runtime type descriptors, method wrappers).
+func NeedEmit(typ *types.Type) bool {
+ // TODO(mdempsky): Export data should keep track of which anonymous
+ // and instantiated types were emitted, so at least downstream
+ // packages can skip re-emitting them.
+ //
+ // Perhaps we can just generalize the linker-symbol indexing to
+ // track the index of arbitrary types, not just defined types, and
+ // use its presence to detect this. The same idea would work for
+ // instantiated generic functions too.
+
+ switch sym := typ.Sym(); {
+ case sym == nil:
+ // Anonymous type; possibly never seen before or ever again.
+ // Need to emit to be safe (however, see TODO above).
+ return true
+
+ case sym.Pkg == types.LocalPkg:
+ // Local defined type; our responsibility.
+ return true
+
+ case base.Ctxt.Pkgpath == "runtime" && (sym.Pkg == types.BuiltinPkg || sym.Pkg == ir.Pkgs.Unsafe):
+ // Package runtime is responsible for including code for builtin
+ // types (predeclared and package unsafe).
+ return true
+
+ case typ.IsFullyInstantiated():
+ // Instantiated type; possibly instantiated with unique type arguments.
+ // Need to emit to be safe (however, see TODO above).
+ return true
+
+ case typ.HasShape():
+ // Shape type; need to emit even though it lives in the .shape package.
+ // TODO: make sure the linker deduplicates them (see dupok in writeType above).
+ return true
+
+ default:
+ // Should have been emitted by an imported package.
+ return false
+ }
+}
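
As a rough source-level illustration (assumed example, not from this CL), types that land in the NeedEmit cases above:

package main

// Locally defined type: sym.Pkg == types.LocalPkg, so this package emits it.
type Local struct{ x int }

// Generic type; pair[int, string] below is a fully instantiated type.
type pair[A, B any] struct {
	a A
	b B
}

var (
	anon = struct{ y int }{}                 // anonymous type: emitted to be safe
	inst = pair[int, string]{a: 1, b: "one"} // instantiated type: emitted to be safe
)

func main() {
	_, _, _ = Local{}, anon, inst
}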
+
// Generate a wrapper function to convert from
// a receiver of type T to a receiver of type U.
// That is,
@@ -1761,7 +1767,45 @@ func CollectPTabs() {
//
// rcvr - U
// method - M func (t T)(), a TFIELD type struct
-func methodWrapper(rcvr *types.Type, method *types.Field) *obj.LSym {
+//
+// Also wraps methods on instantiated generic types for use in itab entries.
+// For an instantiated generic type G[int], we generate wrappers like:
+// G[int] pointer shaped:
+// func (x G[int]) f(arg) {
+// .inst.G[int].f(dictionary, x, arg)
+// }
+// G[int] not pointer shaped:
+// func (x *G[int]) f(arg) {
+// .inst.G[int].f(dictionary, *x, arg)
+// }
+// These wrappers are always fully stenciled.
+func methodWrapper(rcvr *types.Type, method *types.Field, forItab bool) *obj.LSym {
+ orig := rcvr
+ if forItab && !types.IsDirectIface(rcvr) {
+ rcvr = rcvr.PtrTo()
+ }
+
+ generic := false
+ // We don't need a dictionary if we are reaching a method (possibly via an
+ // embedded field) which is an interface method.
+ if !types.IsInterfaceMethod(method.Type) {
+ rcvr1 := rcvr
+ if rcvr1.IsPtr() {
+ rcvr1 = rcvr.Elem()
+ }
+ if len(rcvr1.RParams()) > 0 {
+ // If rcvr has rparams, remember method as generic, which
+ // means we need to add a dictionary to the wrapper.
+ generic = true
+ targs := rcvr1.RParams()
+ for _, t := range targs {
+ if t.HasShape() {
+ base.Fatalf("method on type instantiated with shapes targ:%+v rcvr:%+v", t, rcvr)
+ }
+ }
+ }
+ }
+
newnam := ir.MethodSym(rcvr, method.Sym)
lsym := newnam.Linksym()
if newnam.Siggen() {
@@ -1769,19 +1813,18 @@ func methodWrapper(rcvr *types.Type, method *types.Field) *obj.LSym {
}
newnam.SetSiggen(true)
- if types.Identical(rcvr, method.Type.Recv().Type) {
+ // Except in quirks mode, unified IR creates its own wrappers.
+ if base.Debug.Unified != 0 && base.Debug.UnifiedQuirks == 0 {
return lsym
}
- // Only generate (*T).M wrappers for T.M in T's own package.
- if rcvr.IsPtr() && rcvr.Elem() == method.Type.Recv().Type &&
- rcvr.Elem().Sym() != nil && rcvr.Elem().Sym().Pkg != types.LocalPkg {
+ // For generic methods, we need to generate the wrapper even if the receiver
+ // types are identical, because we want to add the dictionary.
+ if !generic && types.Identical(rcvr, method.Type.Recv().Type) {
return lsym
}
- // Only generate I.M wrappers for I in I's own package
- // but keep doing it for error.Error (was issue #29304).
- if rcvr.IsInterface() && rcvr.Sym() != nil && rcvr.Sym().Pkg != types.LocalPkg && rcvr != types.ErrorType {
+ if !NeedEmit(rcvr) || rcvr.IsPtr() && !NeedEmit(rcvr.Elem()) {
return lsym
}
@@ -1802,9 +1845,10 @@ func methodWrapper(rcvr *types.Type, method *types.Field) *obj.LSym {
nthis := ir.AsNode(tfn.Type().Recv().Nname)
methodrcvr := method.Type.Recv().Type
+ indirect := rcvr.IsPtr() && rcvr.Elem() == methodrcvr
// generate nil pointer check for better error
- if rcvr.IsPtr() && rcvr.Elem() == methodrcvr {
+ if indirect {
// generating wrapper from *T to T.
n := ir.NewIfStmt(base.Pos, nil, nil, nil)
n.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, nthis, typecheck.NodNil())
@@ -1814,7 +1858,6 @@ func methodWrapper(rcvr *types.Type, method *types.Field) *obj.LSym {
}
dot := typecheck.AddImplicitDots(ir.NewSelectorExpr(base.Pos, ir.OXDOT, nthis, method.Sym))
-
// generate call
// It's not possible to use a tail call when dynamic linking on ppc64le. The
// bad scenario is when a local call is made to the wrapper: the wrapper will
@@ -1826,7 +1869,7 @@ func methodWrapper(rcvr *types.Type, method *types.Field) *obj.LSym {
// Disable tailcall for RegabiArgs for now. The IR does not connect the
// arguments with the OTAILCALL node, and the arguments are not marshaled
// correctly.
- if !base.Flag.Cfg.Instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !types.IsInterfaceMethod(method.Type) && !(base.Ctxt.Arch.Name == "ppc64le" && base.Ctxt.Flag_dynlink) && !buildcfg.Experiment.RegabiArgs {
+ if !base.Flag.Cfg.Instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !types.IsInterfaceMethod(method.Type) && !(base.Ctxt.Arch.Name == "ppc64le" && base.Ctxt.Flag_dynlink) && !buildcfg.Experiment.RegabiArgs && !generic {
// generate tail call: adjust pointer receiver and jump to embedded method.
left := dot.X // skip final .M
if !left.Type().IsPtr() {
@@ -1837,8 +1880,68 @@ func methodWrapper(rcvr *types.Type, method *types.Field) *obj.LSym {
fn.Body.Append(ir.NewTailCallStmt(base.Pos, method.Nname.(*ir.Name)))
} else {
fn.SetWrapper(true) // ignore frame for panic+recover matching
- call := ir.NewCallExpr(base.Pos, ir.OCALL, dot, nil)
- call.Args = ir.ParamNames(tfn.Type())
+ var call *ir.CallExpr
+
+ if generic && dot.X != nthis {
+ // TODO: for now, we don't try to generate dictionary wrappers for
+ // any methods involving embedded fields, because we're not
+ // generating the needed dictionaries in instantiateMethods.
+ generic = false
+ }
+
+ if generic {
+ var args []ir.Node
+ var targs []*types.Type
+ if rcvr.IsPtr() {
+ targs = rcvr.Elem().RParams()
+ } else {
+ targs = rcvr.RParams()
+ }
+ // The wrapper for an auto-generated pointer/non-pointer
+ // receiver method should share the same dictionary as the
+ // corresponding original (user-written) method.
+ baseOrig := orig
+ if baseOrig.IsPtr() && !method.Type.Recv().Type.IsPtr() {
+ baseOrig = baseOrig.Elem()
+ } else if !baseOrig.IsPtr() && method.Type.Recv().Type.IsPtr() {
+ baseOrig = types.NewPtr(baseOrig)
+ }
+ args = append(args, getDictionary(ir.MethodSym(baseOrig, method.Sym), targs))
+ if indirect {
+ args = append(args, ir.NewStarExpr(base.Pos, dot.X))
+ } else if methodrcvr.IsPtr() && methodrcvr.Elem() == dot.X.Type() {
+ // Case where method call is via a non-pointer
+ // embedded field with a pointer method.
+ args = append(args, typecheck.NodAddrAt(base.Pos, dot.X))
+ } else {
+ args = append(args, dot.X)
+ }
+ args = append(args, ir.ParamNames(tfn.Type())...)
+
+ // Target method uses shaped names.
+ targs2 := make([]*types.Type, len(targs))
+ for i, t := range targs {
+ targs2[i] = typecheck.Shapify(t)
+ }
+ targs = targs2
+
+ sym := typecheck.MakeFuncInstSym(ir.MethodSym(methodrcvr, method.Sym), targs, true)
+ if sym.Def == nil {
+ // Currently we make sure that we have all the instantiations
+ // we need by generating them all in ../noder/stencil.go:instantiateMethods
+ // TODO: maybe there's a better, more incremental way to generate
+ // only the instantiations we need?
+ base.Fatalf("instantiation %s not found", sym.Name)
+ }
+ target := ir.AsNode(sym.Def)
+ call = ir.NewCallExpr(base.Pos, ir.OCALL, target, args)
+ // Fill in the generic method node that was not filled in
+ // in instantiateMethod.
+ method.Nname = fn.Nname
+ } else {
+ call = ir.NewCallExpr(base.Pos, ir.OCALL, dot, nil)
+ call.Args = ir.ParamNames(tfn.Type())
+ }
call.IsDDD = tfn.Type().IsVariadic()
if method.Type.NumResults() > 0 {
ret := ir.NewReturnStmt(base.Pos, nil)
@@ -1858,13 +1961,10 @@ func methodWrapper(rcvr *types.Type, method *types.Field) *obj.LSym {
ir.CurFunc = fn
typecheck.Stmts(fn.Body)
- // Inline calls within (*T).M wrappers. This is safe because we only
- // generate those wrappers within the same compilation unit as (T).M.
- // TODO(mdempsky): Investigate why we can't enable this more generally.
- if rcvr.IsPtr() && rcvr.Elem() == method.Type.Recv().Type && rcvr.Elem().Sym() != nil {
+ if AfterGlobalEscapeAnalysis {
inline.InlineCalls(fn)
+ escape.Batch([]*ir.Func{fn}, false)
}
- escape.Batch([]*ir.Func{fn}, false)
ir.CurFunc = nil
typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
@@ -1872,11 +1972,21 @@ func methodWrapper(rcvr *types.Type, method *types.Field) *obj.LSym {
return lsym
}
+// AfterGlobalEscapeAnalysis tracks whether package gc has already
+// performed the main, global escape analysis pass. If so,
+// methodWrapper takes responsibility for escape analyzing any
+// generated wrappers.
+var AfterGlobalEscapeAnalysis bool
+
var ZeroSize int64
// MarkTypeUsedInInterface marks that type t is converted to an interface.
// This information is used in the linker in dead method elimination.
func MarkTypeUsedInInterface(t *types.Type, from *obj.LSym) {
+ if t.HasShape() {
+ // Shape types shouldn't be put in interfaces, so we shouldn't ever get here.
+ base.Fatalf("shape types have no methods %+v", t)
+ }
tsym := TypeLinksym(t)
// Emit a marker relocation. The linker will know the type is converted
// to an interface if "from" is reachable.
@@ -1897,9 +2007,60 @@ func MarkUsedIfaceMethod(n *ir.CallExpr) {
tsym := TypeLinksym(ityp)
r := obj.Addrel(ir.CurFunc.LSym)
r.Sym = tsym
- // dot.Xoffset is the method index * PtrSize (the offset of code pointer
+ // dot.Offset() is the method index * PtrSize (the offset of code pointer
// in itab).
midx := dot.Offset() / int64(types.PtrSize)
r.Add = InterfaceMethodOffset(ityp, midx)
r.Type = objabi.R_USEIFACEMETHOD
}
+
+// MarkUsedIfaceMethodIndex marks that method number ix (in the AllMethods list)
+// of interface type ityp is used, and should be attached to lsym.
+func MarkUsedIfaceMethodIndex(lsym *obj.LSym, ityp *types.Type, ix int) {
+ tsym := TypeLinksym(ityp)
+ r := obj.Addrel(lsym)
+ r.Sym = tsym
+ r.Add = InterfaceMethodOffset(ityp, int64(ix))
+ r.Type = objabi.R_USEIFACEMETHOD
+}
+
+// getDictionary returns the dictionary for the given named generic function
+// or method, with the given type arguments.
+func getDictionary(gf *types.Sym, targs []*types.Type) ir.Node {
+ if len(targs) == 0 {
+ base.Fatalf("%s should have type arguments", gf.Name)
+ }
+ for _, t := range targs {
+ if t.HasShape() {
+ base.Fatalf("dictionary for %s should only use concrete types: %+v", gf.Name, t)
+ }
+ }
+
+ sym := typecheck.MakeDictSym(gf, targs, true)
+
+ // Make sure the dictionary has already been generated.
+ if lsym := sym.Linksym(); len(lsym.P) == 0 {
+ base.Fatalf("Dictionary should have already been generated: %s.%s", sym.Pkg.Path, sym.Name)
+ }
+
+ // Make (or reuse) a node referencing the dictionary symbol.
+ var n *ir.Name
+ if sym.Def != nil {
+ n = sym.Def.(*ir.Name)
+ } else {
+ n = typecheck.NewName(sym)
+ n.SetType(types.Types[types.TUINTPTR]) // should probably be [...]uintptr, but doesn't really matter
+ n.SetTypecheck(1)
+ n.Class = ir.PEXTERN
+ sym.Def = n
+ }
+
+ // Return the address of the dictionary.
+ np := typecheck.NodAddr(n)
+ // Note: treat dictionary pointers as uintptrs, so they aren't pointers
+ // with respect to GC. That saves on stack scanning work, write barriers, etc.
+ // We can get away with it because dictionaries are global variables.
+ np.SetType(types.Types[types.TUINTPTR])
+ np.SetTypecheck(1)
+ return np
+}
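
Tying the wrapper and dictionary pieces together, a hedged source-level sketch (the mangled names in the comment are schematic, not the exact linker symbols):

package main

import "fmt"

type G[T any] struct{ v T }

func (g G[T]) Get() T { return g.v }

type Getter interface{ Get() int }

func main() {
	// Converting G[int] to Getter needs an itab (writeITab) whose method slot
	// points at an auto-generated wrapper (methodWrapper with forItab=true).
	// That wrapper fetches G[int]'s dictionary via getDictionary and calls the
	// single shape-instantiated body, roughly .inst.Get(.dict.G[int], g, ...).
	var x Getter = G[int]{v: 42}
	fmt.Println(x.Get())
}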
diff --git a/src/cmd/compile/internal/riscv64/galign.go b/src/cmd/compile/internal/riscv64/galign.go
index 338248a7cf..846ed8fb38 100644
--- a/src/cmd/compile/internal/riscv64/galign.go
+++ b/src/cmd/compile/internal/riscv64/galign.go
@@ -16,7 +16,6 @@ func Init(arch *ssagen.ArchInfo) {
arch.MAXWIDTH = 1 << 50
arch.Ginsnop = ginsnop
- arch.Ginsnopdefer = ginsnop
arch.ZeroRange = zeroRange
arch.SSAMarkMoves = ssaMarkMoves
diff --git a/src/cmd/compile/internal/riscv64/ssa.go b/src/cmd/compile/internal/riscv64/ssa.go
index 64a9b3b33b..d3cbb4ec24 100644
--- a/src/cmd/compile/internal/riscv64/ssa.go
+++ b/src/cmd/compile/internal/riscv64/ssa.go
@@ -282,6 +282,42 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
p.Reg = r1
p.To.Type = obj.TYPE_REG
p.To.Reg = r
+ case ssa.OpRISCV64LoweredMuluhilo:
+ r0 := v.Args[0].Reg()
+ r1 := v.Args[1].Reg()
+ p := s.Prog(riscv.AMULHU)
+ p.From.Type = obj.TYPE_REG
+ p.From.Reg = r1
+ p.Reg = r0
+ p.To.Type = obj.TYPE_REG
+ p.To.Reg = v.Reg0()
+ p1 := s.Prog(riscv.AMUL)
+ p1.From.Type = obj.TYPE_REG
+ p1.From.Reg = r1
+ p1.Reg = r0
+ p1.To.Type = obj.TYPE_REG
+ p1.To.Reg = v.Reg1()
+ case ssa.OpRISCV64LoweredMuluover:
+ r0 := v.Args[0].Reg()
+ r1 := v.Args[1].Reg()
+ p := s.Prog(riscv.AMULHU)
+ p.From.Type = obj.TYPE_REG
+ p.From.Reg = r1
+ p.Reg = r0
+ p.To.Type = obj.TYPE_REG
+ p.To.Reg = v.Reg1()
+ p1 := s.Prog(riscv.AMUL)
+ p1.From.Type = obj.TYPE_REG
+ p1.From.Reg = r1
+ p1.Reg = r0
+ p1.To.Type = obj.TYPE_REG
+ p1.To.Reg = v.Reg0()
+ p2 := s.Prog(riscv.ASNEZ)
+ p2.From.Type = obj.TYPE_REG
+ p2.From.Reg = v.Reg1()
+ p2.To.Type = obj.TYPE_REG
+ p2.To.Reg = v.Reg1()
+
case ssa.OpRISCV64FSQRTS, ssa.OpRISCV64FNEGS, ssa.OpRISCV64FSQRTD, ssa.OpRISCV64FNEGD,
ssa.OpRISCV64FMVSX, ssa.OpRISCV64FMVDX,
ssa.OpRISCV64FCVTSW, ssa.OpRISCV64FCVTSL, ssa.OpRISCV64FCVTWS, ssa.OpRISCV64FCVTLS,
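
What the two new lowered ops compute, expressed with math/bits (a sketch of the semantics only; the SSA ops above produce these values directly in registers, and the SNEZ collapses the high half into the overflow flag):

package main

import (
	"fmt"
	"math/bits"
)

func main() {
	x, y := uint64(1)<<40, uint64(3)<<40

	// LoweredMuluhilo: full 128-bit product as (hi, lo).
	hi, lo := bits.Mul64(x, y)
	fmt.Println(hi, lo)

	// LoweredMuluover: (low 64 bits, overflow), where overflow is simply
	// "high half is non-zero", which is what the SNEZ above computes.
	overflow := hi != 0
	fmt.Println(lo, overflow)
}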
diff --git a/src/cmd/compile/internal/s390x/galign.go b/src/cmd/compile/internal/s390x/galign.go
index b004a2db0a..d880834c22 100644
--- a/src/cmd/compile/internal/s390x/galign.go
+++ b/src/cmd/compile/internal/s390x/galign.go
@@ -16,7 +16,6 @@ func Init(arch *ssagen.ArchInfo) {
arch.ZeroRange = zerorange
arch.Ginsnop = ginsnop
- arch.Ginsnopdefer = ginsnop
arch.SSAMarkMoves = ssaMarkMoves
arch.SSAGenValue = ssaGenValue
diff --git a/src/cmd/compile/internal/ssa/config.go b/src/cmd/compile/internal/ssa/config.go
index a8393a1999..32e3a0860e 100644
--- a/src/cmd/compile/internal/ssa/config.go
+++ b/src/cmd/compile/internal/ssa/config.go
@@ -149,12 +149,6 @@ type Frontend interface {
// for the parts of that compound type.
SplitSlot(parent *LocalSlot, suffix string, offset int64, t *types.Type) LocalSlot
- // DerefItab dereferences an itab function
- // entry, given the symbol of the itab and
- // the byte offset of the function pointer.
- // It may return nil.
- DerefItab(sym *obj.LSym, offset int64) *obj.LSym
-
// Line returns a string describing the given position.
Line(src.XPos) string
@@ -177,7 +171,7 @@ type Frontend interface {
}
// NewConfig returns a new configuration object for the given architecture.
-func NewConfig(arch string, types Types, ctxt *obj.Link, optimize bool) *Config {
+func NewConfig(arch string, types Types, ctxt *obj.Link, optimize, softfloat bool) *Config {
c := &Config{arch: arch, Types: types}
c.useAvg = true
c.useHmul = true
@@ -196,7 +190,7 @@ func NewConfig(arch string, types Types, ctxt *obj.Link, optimize bool) *Config
c.floatParamRegs = paramFloatRegAMD64
c.FPReg = framepointerRegAMD64
c.LinkReg = linkRegAMD64
- c.hasGReg = buildcfg.Experiment.RegabiG
+ c.hasGReg = true
case "386":
c.PtrSize = 4
c.RegSize = 4
@@ -228,6 +222,8 @@ func NewConfig(arch string, types Types, ctxt *obj.Link, optimize bool) *Config
c.registers = registersARM64[:]
c.gpRegMask = gpRegMaskARM64
c.fpRegMask = fpRegMaskARM64
+ c.intParamRegs = paramIntRegARM64
+ c.floatParamRegs = paramFloatRegARM64
c.FPReg = framepointerRegARM64
c.LinkReg = linkRegARM64
c.hasGReg = true
@@ -324,6 +320,10 @@ func NewConfig(arch string, types Types, ctxt *obj.Link, optimize bool) *Config
c.optimize = optimize
c.useSSE = true
c.UseFMA = true
+ c.SoftFloat = softfloat
+ if softfloat {
+ c.floatParamRegs = nil // no FP registers in softfloat mode
+ }
c.ABI0 = abi.NewABIConfig(0, 0, ctxt.FixedFrameSize())
c.ABI1 = abi.NewABIConfig(len(c.intParamRegs), len(c.floatParamRegs), ctxt.FixedFrameSize())
diff --git a/src/cmd/compile/internal/ssa/expand_calls.go b/src/cmd/compile/internal/ssa/expand_calls.go
index 7e973ab205..a8c6c26dad 100644
--- a/src/cmd/compile/internal/ssa/expand_calls.go
+++ b/src/cmd/compile/internal/ssa/expand_calls.go
@@ -215,7 +215,7 @@ func (x *expandState) isAlreadyExpandedAggregateType(t *types.Type) bool {
return false
}
return t.IsStruct() || t.IsArray() || t.IsComplex() || t.IsInterface() || t.IsString() || t.IsSlice() ||
- t.Size() > x.regSize && t.IsInteger()
+ (t.Size() > x.regSize && (t.IsInteger() || (x.f.Config.SoftFloat && t.IsFloat())))
}
// offsetFrom creates an offset from a pointer, simplifying chained offsets and offsets from SP
@@ -380,6 +380,12 @@ func (x *expandState) rewriteSelect(leaf *Value, selector *Value, offset int64,
// The OpLoad was created to load the single field of the IData
// This case removes that StructSelect.
if leafType != selector.Type {
+ if x.f.Config.SoftFloat && selector.Type.IsFloat() {
+ if x.debug {
+ x.Printf("---OpLoad, break\n")
+ }
+ break // softfloat pass will take care of that
+ }
x.f.Fatalf("Unexpected Load as selector, leaf=%s, selector=%s\n", leaf.LongString(), selector.LongString())
}
leaf.copyOf(selector)
@@ -525,11 +531,11 @@ func (x *expandState) rewriteSelect(leaf *Value, selector *Value, offset int64,
case OpComplexReal:
ls := x.rewriteSelect(leaf, selector.Args[0], offset, regOffset)
- locs = x.splitSlots(ls, ".real", 0, leafType)
+ locs = x.splitSlots(ls, ".real", 0, selector.Type)
case OpComplexImag:
- ls := x.rewriteSelect(leaf, selector.Args[0], offset+leafType.Width, regOffset+RO_complex_imag) // result is FloatNN, width of result is offset of imaginary part.
- locs = x.splitSlots(ls, ".imag", leafType.Width, leafType)
+ ls := x.rewriteSelect(leaf, selector.Args[0], offset+selector.Type.Width, regOffset+RO_complex_imag) // result is FloatNN, width of result is offset of imaginary part.
+ locs = x.splitSlots(ls, ".imag", selector.Type.Width, selector.Type)
case OpStringLen, OpSliceLen:
ls := x.rewriteSelect(leaf, selector.Args[0], offset+x.ptrSize, regOffset+RO_slice_len)
diff --git a/src/cmd/compile/internal/ssa/export_test.go b/src/cmd/compile/internal/ssa/export_test.go
index 8ed8a0c4a6..6d3c0f3ccb 100644
--- a/src/cmd/compile/internal/ssa/export_test.go
+++ b/src/cmd/compile/internal/ssa/export_test.go
@@ -39,7 +39,7 @@ func testConfigArch(tb testing.TB, arch string) *Conf {
tb.Fatal("testTypes is 64-bit only")
}
c := &Conf{
- config: NewConfig(arch, testTypes, ctxt, true),
+ config: NewConfig(arch, testTypes, ctxt, true, false),
tb: tb,
}
return c
diff --git a/src/cmd/compile/internal/ssa/gen/AMD64.rules b/src/cmd/compile/internal/ssa/gen/AMD64.rules
index 4cd00732fc..45c0238317 100644
--- a/src/cmd/compile/internal/ssa/gen/AMD64.rules
+++ b/src/cmd/compile/internal/ssa/gen/AMD64.rules
@@ -460,7 +460,7 @@
(IsInBounds idx len) => (SETB (CMPQ idx len))
(IsSliceInBounds idx len) => (SETBE (CMPQ idx len))
(NilCheck ...) => (LoweredNilCheck ...)
-(GetG mem) && !(buildcfg.Experiment.RegabiG && v.Block.Func.OwnAux.Fn.ABI() == obj.ABIInternal) => (LoweredGetG mem) // only lower in old ABI. in new ABI we have a G register.
+(GetG mem) && v.Block.Func.OwnAux.Fn.ABI() != obj.ABIInternal => (LoweredGetG mem) // only lower in old ABI. in new ABI we have a G register.
(GetClosurePtr ...) => (LoweredGetClosurePtr ...)
(GetCallerPC ...) => (LoweredGetCallerPC ...)
(GetCallerSP ...) => (LoweredGetCallerSP ...)
diff --git a/src/cmd/compile/internal/ssa/gen/ARM64.rules b/src/cmd/compile/internal/ssa/gen/ARM64.rules
index 62699f290c..530e48bcb2 100644
--- a/src/cmd/compile/internal/ssa/gen/ARM64.rules
+++ b/src/cmd/compile/internal/ssa/gen/ARM64.rules
@@ -2868,3 +2868,12 @@
&& isInlinableMemmove(dst, src, sz, config)
&& clobber(s1, s2, s3, call)
=> (Move [sz] dst src mem)
+
+// Match post-lowering calls, register version.
+(SelectN [0] call:(CALLstatic {sym} dst src (MOVDconst [sz]) mem))
+ && sz >= 0
+ && isSameCall(sym, "runtime.memmove")
+ && call.Uses == 1
+ && isInlinableMemmove(dst, src, sz, config)
+ && clobber(call)
+ => (Move [sz] dst src mem)
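
The source pattern this rule targets, roughly (assumed example; whether a given call is rewritten still depends on isInlinableMemmove and on the register-ABI call shape being present):

package main

import "fmt"

func main() {
	var src, dst [16]byte
	src[0] = 1
	// A copy with a small constant size lowers to a runtime.memmove call,
	// which the rule above can turn into a plain Move.
	copy(dst[:], src[:])
	fmt.Println(dst[0])
}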
diff --git a/src/cmd/compile/internal/ssa/gen/ARM64Ops.go b/src/cmd/compile/internal/ssa/gen/ARM64Ops.go
index 18a5666b40..5de0b5f020 100644
--- a/src/cmd/compile/internal/ssa/gen/ARM64Ops.go
+++ b/src/cmd/compile/internal/ssa/gen/ARM64Ops.go
@@ -482,9 +482,9 @@ func init() {
{name: "CSETM", argLength: 1, reg: readflags, asm: "CSETM", aux: "CCop"}, // auxint(flags) ? -1 : 0
// function calls
- {name: "CALLstatic", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
- {name: "CALLclosure", argLength: 3, reg: regInfo{inputs: []regMask{gpsp, buildReg("R26"), 0}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call function via closure. arg0=codeptr, arg1=closure, arg2=mem, auxint=argsize, returns mem
- {name: "CALLinter", argLength: 2, reg: regInfo{inputs: []regMask{gp}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call fn by pointer. arg0=codeptr, arg1=mem, auxint=argsize, returns mem
+ {name: "CALLstatic", argLength: -1, reg: regInfo{clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call static function aux.(*obj.LSym). last arg=mem, auxint=argsize, returns mem
+ {name: "CALLclosure", argLength: -1, reg: regInfo{inputs: []regMask{gpsp, buildReg("R26"), 0}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call function via closure. arg0=codeptr, arg1=closure, last arg=mem, auxint=argsize, returns mem
+ {name: "CALLinter", argLength: -1, reg: regInfo{inputs: []regMask{gp}, clobbers: callerSave}, aux: "CallOff", clobberFlags: true, call: true}, // call fn by pointer. arg0=codeptr, last arg=mem, auxint=argsize, returns mem
// pseudo-ops
{name: "LoweredNilCheck", argLength: 2, reg: regInfo{inputs: []regMask{gpg}}, nilCheck: true, faultOnNilArg0: true}, // panic if arg0 is nil. arg1=mem.
@@ -759,15 +759,17 @@ func init() {
}
archs = append(archs, arch{
- name: "ARM64",
- pkg: "cmd/internal/obj/arm64",
- genfile: "../../arm64/ssa.go",
- ops: ops,
- blocks: blocks,
- regnames: regNamesARM64,
- gpregmask: gp,
- fpregmask: fp,
- framepointerreg: -1, // not used
- linkreg: int8(num["R30"]),
+ name: "ARM64",
+ pkg: "cmd/internal/obj/arm64",
+ genfile: "../../arm64/ssa.go",
+ ops: ops,
+ blocks: blocks,
+ regnames: regNamesARM64,
+ ParamIntRegNames: "R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15",
+ ParamFloatRegNames: "F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15",
+ gpregmask: gp,
+ fpregmask: fp,
+ framepointerreg: -1, // not used
+ linkreg: int8(num["R30"]),
})
}
diff --git a/src/cmd/compile/internal/ssa/gen/PPC64Ops.go b/src/cmd/compile/internal/ssa/gen/PPC64Ops.go
index f7198b90c3..d7d8a33a0a 100644
--- a/src/cmd/compile/internal/ssa/gen/PPC64Ops.go
+++ b/src/cmd/compile/internal/ssa/gen/PPC64Ops.go
@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+//go:build ignore
// +build ignore
package main
diff --git a/src/cmd/compile/internal/ssa/gen/RISCV64.rules b/src/cmd/compile/internal/ssa/gen/RISCV64.rules
index 9cdd62edbe..4eb48e3928 100644
--- a/src/cmd/compile/internal/ssa/gen/RISCV64.rules
+++ b/src/cmd/compile/internal/ssa/gen/RISCV64.rules
@@ -29,6 +29,8 @@
(Sub64F ...) => (FSUBD ...)
(Mul64 ...) => (MUL ...)
+(Mul64uhilo ...) => (LoweredMuluhilo ...)
+(Mul64uover ...) => (LoweredMuluover ...)
(Mul32 ...) => (MULW ...)
(Mul16 x y) => (MULW (SignExt16to32 x) (SignExt16to32 y))
(Mul8 x y) => (MULW (SignExt8to32 x) (SignExt8to32 y))
@@ -586,6 +588,10 @@
(BNEZ (SEQZ x) yes no) => (BEQZ x yes no)
(BNEZ (SNEZ x) yes no) => (BNEZ x yes no)
+// Absorb NEG into branch when possible.
+(BEQZ x:(NEG y) yes no) && x.Uses == 1 => (BEQZ y yes no)
+(BNEZ x:(NEG y) yes no) && x.Uses == 1 => (BNEZ y yes no)
+
// Convert BEQZ/BNEZ into more optimal branch conditions.
(BEQZ (SUB x y) yes no) => (BEQ x y yes no)
(BNEZ (SUB x y) yes no) => (BNE x y yes no)
@@ -594,11 +600,15 @@
(BEQZ (SLTU x y) yes no) => (BGEU x y yes no)
(BNEZ (SLTU x y) yes no) => (BLTU x y yes no)
-// Convert branch with zero to BEQZ/BNEZ.
+// Convert branch with zero to more optimal branch zero.
(BEQ (MOVDconst [0]) cond yes no) => (BEQZ cond yes no)
(BEQ cond (MOVDconst [0]) yes no) => (BEQZ cond yes no)
(BNE (MOVDconst [0]) cond yes no) => (BNEZ cond yes no)
(BNE cond (MOVDconst [0]) yes no) => (BNEZ cond yes no)
+(BLT (MOVDconst [0]) cond yes no) => (BGTZ cond yes no)
+(BLT cond (MOVDconst [0]) yes no) => (BLTZ cond yes no)
+(BGE (MOVDconst [0]) cond yes no) => (BLEZ cond yes no)
+(BGE cond (MOVDconst [0]) yes no) => (BGEZ cond yes no)
// Store zero
(MOVBstore [off] {sym} ptr (MOVDconst [0]) mem) => (MOVBstorezero [off] {sym} ptr mem)
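
Source shapes the new BLT/BGE-with-zero rules above target (sketch; comments name the branch expected on riscv64):

package main

import "fmt"

func sign(x int64) int64 {
	if x < 0 { // BLT x, zero -> BLTZ
		return -1
	}
	if x > 0 { // BLT zero, x -> BGTZ
		return 1
	}
	return 0
}

func main() {
	fmt.Println(sign(-7), sign(0), sign(7))
}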
diff --git a/src/cmd/compile/internal/ssa/gen/RISCV64Ops.go b/src/cmd/compile/internal/ssa/gen/RISCV64Ops.go
index 0774d4c654..d36daa8b83 100644
--- a/src/cmd/compile/internal/ssa/gen/RISCV64Ops.go
+++ b/src/cmd/compile/internal/ssa/gen/RISCV64Ops.go
@@ -123,6 +123,7 @@ func init() {
gp01 = regInfo{outputs: []regMask{gpMask}}
gp11 = regInfo{inputs: []regMask{gpMask}, outputs: []regMask{gpMask}}
gp21 = regInfo{inputs: []regMask{gpMask, gpMask}, outputs: []regMask{gpMask}}
+ gp22 = regInfo{inputs: []regMask{gpMask, gpMask}, outputs: []regMask{gpMask, gpMask}}
gpload = regInfo{inputs: []regMask{gpspsbMask, 0}, outputs: []regMask{gpMask}}
gp11sb = regInfo{inputs: []regMask{gpspsbMask}, outputs: []regMask{gpMask}}
gpxchg = regInfo{inputs: []regMask{gpspsbgMask, gpgMask}, outputs: []regMask{gpMask}}
@@ -157,6 +158,9 @@ func init() {
{name: "MULW", argLength: 2, reg: gp21, asm: "MULW", commutative: true, typ: "Int32"},
{name: "MULH", argLength: 2, reg: gp21, asm: "MULH", commutative: true, typ: "Int64"},
{name: "MULHU", argLength: 2, reg: gp21, asm: "MULHU", commutative: true, typ: "UInt64"},
+ {name: "LoweredMuluhilo", argLength: 2, reg: gp22, resultNotInArgs: true}, // arg0 * arg1, return (hi, lo)
+ {name: "LoweredMuluover", argLength: 2, reg: gp22, resultNotInArgs: true}, // arg0 * arg1, return (64 bits of arg0*arg1, overflow)
+
{name: "DIV", argLength: 2, reg: gp21, asm: "DIV", typ: "Int64"}, // arg0 / arg1
{name: "DIVU", argLength: 2, reg: gp21, asm: "DIVU", typ: "UInt64"},
{name: "DIVW", argLength: 2, reg: gp21, asm: "DIVW", typ: "Int32"},
diff --git a/src/cmd/compile/internal/ssa/gen/dec64Ops.go b/src/cmd/compile/internal/ssa/gen/dec64Ops.go
index 8c5883bc56..78fcea885a 100644
--- a/src/cmd/compile/internal/ssa/gen/dec64Ops.go
+++ b/src/cmd/compile/internal/ssa/gen/dec64Ops.go
@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+//go:build ignore
// +build ignore
package main
diff --git a/src/cmd/compile/internal/ssa/gen/decOps.go b/src/cmd/compile/internal/ssa/gen/decOps.go
index b826481c9f..d5cd79378c 100644
--- a/src/cmd/compile/internal/ssa/gen/decOps.go
+++ b/src/cmd/compile/internal/ssa/gen/decOps.go
@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+//go:build ignore
// +build ignore
package main
diff --git a/src/cmd/compile/internal/ssa/opGen.go b/src/cmd/compile/internal/ssa/opGen.go
index 1c37fbe0db..8ce13abed3 100644
--- a/src/cmd/compile/internal/ssa/opGen.go
+++ b/src/cmd/compile/internal/ssa/opGen.go
@@ -2069,6 +2069,8 @@ const (
OpRISCV64MULW
OpRISCV64MULH
OpRISCV64MULHU
+ OpRISCV64LoweredMuluhilo
+ OpRISCV64LoweredMuluover
OpRISCV64DIV
OpRISCV64DIVU
OpRISCV64DIVW
@@ -20664,7 +20666,7 @@ var opcodeTable = [...]opInfo{
{
name: "CALLstatic",
auxType: auxCallOff,
- argLen: 1,
+ argLen: -1,
clobberFlags: true,
call: true,
reg: regInfo{
@@ -20674,7 +20676,7 @@ var opcodeTable = [...]opInfo{
{
name: "CALLclosure",
auxType: auxCallOff,
- argLen: 3,
+ argLen: -1,
clobberFlags: true,
call: true,
reg: regInfo{
@@ -20688,7 +20690,7 @@ var opcodeTable = [...]opInfo{
{
name: "CALLinter",
auxType: auxCallOff,
- argLen: 2,
+ argLen: -1,
clobberFlags: true,
call: true,
reg: regInfo{
@@ -27604,6 +27606,36 @@ var opcodeTable = [...]opInfo{
},
},
{
+ name: "LoweredMuluhilo",
+ argLen: 2,
+ resultNotInArgs: true,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 1006632948}, // X3 X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
+ {1, 1006632948}, // X3 X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
+ },
+ outputs: []outputInfo{
+ {0, 1006632948}, // X3 X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
+ {1, 1006632948}, // X3 X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
+ },
+ },
+ },
+ {
+ name: "LoweredMuluover",
+ argLen: 2,
+ resultNotInArgs: true,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 1006632948}, // X3 X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
+ {1, 1006632948}, // X3 X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
+ },
+ outputs: []outputInfo{
+ {0, 1006632948}, // X3 X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
+ {1, 1006632948}, // X3 X5 X6 X7 X8 X9 X10 X11 X12 X13 X14 X15 X16 X17 X18 X19 X20 X21 X22 X23 X24 X25 X26 X28 X29 X30
+ },
+ },
+ },
+ {
name: "DIV",
argLen: 2,
asm: riscv.ADIV,
@@ -36400,8 +36432,8 @@ var registersARM64 = [...]Register{
{62, arm64.REG_F31, -1, "F31"},
{63, 0, -1, "SB"},
}
-var paramIntRegARM64 = []int8(nil)
-var paramFloatRegARM64 = []int8(nil)
+var paramIntRegARM64 = []int8{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}
+var paramFloatRegARM64 = []int8{31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46}
var gpRegMaskARM64 = regMask(670826495)
var fpRegMaskARM64 = regMask(9223372034707292160)
var specialRegMaskARM64 = regMask(0)
diff --git a/src/cmd/compile/internal/ssa/regalloc.go b/src/cmd/compile/internal/ssa/regalloc.go
index 3b90b8769c..28fac6ccd0 100644
--- a/src/cmd/compile/internal/ssa/regalloc.go
+++ b/src/cmd/compile/internal/ssa/regalloc.go
@@ -620,20 +620,20 @@ func (s *regAllocState) init(f *Func) {
}
if s.f.Config.ctxt.Flag_dynlink {
switch s.f.Config.arch {
- case "amd64":
- s.allocatable &^= 1 << 15 // R15
- case "arm":
- s.allocatable &^= 1 << 9 // R9
- case "ppc64le": // R2 already reserved.
- // nothing to do
- case "arm64":
- // nothing to do?
case "386":
// nothing to do.
// Note that for Flag_shared (position independent code)
// we do need to be careful, but that carefulness is hidden
// in the rewrite rules so we always have a free register
// available for global load/stores. See gen/386.rules (search for Flag_shared).
+ case "amd64":
+ s.allocatable &^= 1 << 15 // R15
+ case "arm":
+ s.allocatable &^= 1 << 9 // R9
+ case "arm64":
+ // nothing to do
+ case "ppc64le": // R2 already reserved.
+ // nothing to do
case "s390x":
s.allocatable &^= 1 << 11 // R11
default:
@@ -1865,23 +1865,6 @@ func (s *regAllocState) regalloc(f *Func) {
}
func (s *regAllocState) placeSpills() {
- f := s.f
-
- // Precompute some useful info.
- phiRegs := make([]regMask, f.NumBlocks())
- for _, b := range s.visitOrder {
- var m regMask
- for _, v := range b.Values {
- if v.Op != OpPhi {
- break
- }
- if r, ok := f.getHome(v.ID).(*Register); ok {
- m |= regMask(1) << uint(r.num)
- }
- }
- phiRegs[b.ID] = m
- }
-
mustBeFirst := func(op Op) bool {
return op.isLoweredGetClosurePtr() || op == OpPhi || op == OpArgIntReg || op == OpArgFloatReg
}
diff --git a/src/cmd/compile/internal/ssa/rewrite.go b/src/cmd/compile/internal/ssa/rewrite.go
index 375c4d5a56..115d563933 100644
--- a/src/cmd/compile/internal/ssa/rewrite.go
+++ b/src/cmd/compile/internal/ssa/rewrite.go
@@ -745,27 +745,21 @@ func uaddOvf(a, b int64) bool {
return uint64(a)+uint64(b) < uint64(a)
}
-// de-virtualize an InterCall
-// 'sym' is the symbol for the itab
-func devirt(v *Value, aux Aux, sym Sym, offset int64) *AuxCall {
- f := v.Block.Func
- n, ok := sym.(*obj.LSym)
- if !ok {
+// loadLSymOffset simulates reading a word at an offset into a
+// read-only symbol's runtime memory. If it would read a pointer to
+// another symbol, that symbol is returned. Otherwise, it returns nil.
+func loadLSymOffset(lsym *obj.LSym, offset int64) *obj.LSym {
+ if lsym.Type != objabi.SRODATA {
return nil
}
- lsym := f.fe.DerefItab(n, offset)
- if f.pass.debug > 0 {
- if lsym != nil {
- f.Warnl(v.Pos, "de-virtualizing call")
- } else {
- f.Warnl(v.Pos, "couldn't de-virtualize call")
+
+ for _, r := range lsym.R {
+ if int64(r.Off) == offset && r.Type&^objabi.R_WEAK == objabi.R_ADDR && r.Add == 0 {
+ return r.Sym
}
}
- if lsym == nil {
- return nil
- }
- va := aux.(*AuxCall)
- return StaticAuxCall(lsym, va.abiInfo)
+
+ return nil
}
// de-virtualize an InterLECall
@@ -776,18 +770,14 @@ func devirtLESym(v *Value, aux Aux, sym Sym, offset int64) *obj.LSym {
return nil
}
- f := v.Block.Func
- lsym := f.fe.DerefItab(n, offset)
- if f.pass.debug > 0 {
+ lsym := loadLSymOffset(n, offset)
+ if f := v.Block.Func; f.pass.debug > 0 {
if lsym != nil {
f.Warnl(v.Pos, "de-virtualizing call")
} else {
f.Warnl(v.Pos, "couldn't de-virtualize call")
}
}
- if lsym == nil {
- return nil
- }
return lsym
}
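
What de-virtualization via loadLSymOffset buys at the source level (a sketch; whether the rewrite fires depends on the itab symbol being resolvable at compile time):

package main

import "fmt"

type T struct{}

func (T) M() { fmt.Println("direct call") }

func main() {
	// The concrete type stored in i is statically known, so the interface
	// call can be rewritten from an indirect call through the itab's fun
	// slot into a direct call to T.M, using the relocation found by
	// loadLSymOffset on the read-only itab symbol.
	var i interface{ M() } = T{}
	i.M()
}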
diff --git a/src/cmd/compile/internal/ssa/rewriteAMD64.go b/src/cmd/compile/internal/ssa/rewriteAMD64.go
index 5045ba7351..89d32c0657 100644
--- a/src/cmd/compile/internal/ssa/rewriteAMD64.go
+++ b/src/cmd/compile/internal/ssa/rewriteAMD64.go
@@ -3,7 +3,6 @@
package ssa
-import "internal/buildcfg"
import "math"
import "cmd/internal/obj"
import "cmd/compile/internal/types"
@@ -29339,11 +29338,11 @@ func rewriteValueAMD64_OpFloor(v *Value) bool {
func rewriteValueAMD64_OpGetG(v *Value) bool {
v_0 := v.Args[0]
// match: (GetG mem)
- // cond: !(buildcfg.Experiment.RegabiG && v.Block.Func.OwnAux.Fn.ABI() == obj.ABIInternal)
+ // cond: v.Block.Func.OwnAux.Fn.ABI() != obj.ABIInternal
// result: (LoweredGetG mem)
for {
mem := v_0
- if !(!(buildcfg.Experiment.RegabiG && v.Block.Func.OwnAux.Fn.ABI() == obj.ABIInternal)) {
+ if !(v.Block.Func.OwnAux.Fn.ABI() != obj.ABIInternal) {
break
}
v.reset(OpAMD64LoweredGetG)
diff --git a/src/cmd/compile/internal/ssa/rewriteARM64.go b/src/cmd/compile/internal/ssa/rewriteARM64.go
index 3cdc4d36cb..f7840c5503 100644
--- a/src/cmd/compile/internal/ssa/rewriteARM64.go
+++ b/src/cmd/compile/internal/ssa/rewriteARM64.go
@@ -25997,7 +25997,7 @@ func rewriteValueARM64_OpSelectN(v *Value) bool {
break
}
call := v_0
- if call.Op != OpARM64CALLstatic {
+ if call.Op != OpARM64CALLstatic || len(call.Args) != 1 {
break
}
sym := auxToCall(call.Aux)
@@ -26031,6 +26031,34 @@ func rewriteValueARM64_OpSelectN(v *Value) bool {
v.AddArg3(dst, src, mem)
return true
}
+ // match: (SelectN [0] call:(CALLstatic {sym} dst src (MOVDconst [sz]) mem))
+ // cond: sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)
+ // result: (Move [sz] dst src mem)
+ for {
+ if auxIntToInt64(v.AuxInt) != 0 {
+ break
+ }
+ call := v_0
+ if call.Op != OpARM64CALLstatic || len(call.Args) != 4 {
+ break
+ }
+ sym := auxToCall(call.Aux)
+ mem := call.Args[3]
+ dst := call.Args[0]
+ src := call.Args[1]
+ call_2 := call.Args[2]
+ if call_2.Op != OpARM64MOVDconst {
+ break
+ }
+ sz := auxIntToInt64(call_2.AuxInt)
+ if !(sz >= 0 && isSameCall(sym, "runtime.memmove") && call.Uses == 1 && isInlinableMemmove(dst, src, sz, config) && clobber(call)) {
+ break
+ }
+ v.reset(OpMove)
+ v.AuxInt = int64ToAuxInt(sz)
+ v.AddArg3(dst, src, mem)
+ return true
+ }
return false
}
func rewriteValueARM64_OpSlicemask(v *Value) bool {
diff --git a/src/cmd/compile/internal/ssa/rewriteRISCV64.go b/src/cmd/compile/internal/ssa/rewriteRISCV64.go
index 431fb1aaf6..641be038db 100644
--- a/src/cmd/compile/internal/ssa/rewriteRISCV64.go
+++ b/src/cmd/compile/internal/ssa/rewriteRISCV64.go
@@ -356,6 +356,12 @@ func rewriteValueRISCV64(v *Value) bool {
case OpMul64F:
v.Op = OpRISCV64FMULD
return true
+ case OpMul64uhilo:
+ v.Op = OpRISCV64LoweredMuluhilo
+ return true
+ case OpMul64uover:
+ v.Op = OpRISCV64LoweredMuluover
+ return true
case OpMul8:
return rewriteValueRISCV64_OpMul8(v)
case OpNeg16:
@@ -6096,6 +6102,18 @@ func rewriteBlockRISCV64(b *Block) bool {
b.resetWithControl(BlockRISCV64BEQZ, x)
return true
}
+ // match: (BEQZ x:(NEG y) yes no)
+ // cond: x.Uses == 1
+ // result: (BEQZ y yes no)
+ for b.Controls[0].Op == OpRISCV64NEG {
+ x := b.Controls[0]
+ y := x.Args[0]
+ if !(x.Uses == 1) {
+ break
+ }
+ b.resetWithControl(BlockRISCV64BEQZ, y)
+ return true
+ }
// match: (BEQZ (SUB x y) yes no)
// result: (BEQ x y yes no)
for b.Controls[0].Op == OpRISCV64SUB {
@@ -6123,6 +6141,52 @@ func rewriteBlockRISCV64(b *Block) bool {
b.resetWithControl2(BlockRISCV64BGEU, x, y)
return true
}
+ case BlockRISCV64BGE:
+ // match: (BGE (MOVDconst [0]) cond yes no)
+ // result: (BLEZ cond yes no)
+ for b.Controls[0].Op == OpRISCV64MOVDconst {
+ v_0 := b.Controls[0]
+ if auxIntToInt64(v_0.AuxInt) != 0 {
+ break
+ }
+ cond := b.Controls[1]
+ b.resetWithControl(BlockRISCV64BLEZ, cond)
+ return true
+ }
+ // match: (BGE cond (MOVDconst [0]) yes no)
+ // result: (BGEZ cond yes no)
+ for b.Controls[1].Op == OpRISCV64MOVDconst {
+ cond := b.Controls[0]
+ v_1 := b.Controls[1]
+ if auxIntToInt64(v_1.AuxInt) != 0 {
+ break
+ }
+ b.resetWithControl(BlockRISCV64BGEZ, cond)
+ return true
+ }
+ case BlockRISCV64BLT:
+ // match: (BLT (MOVDconst [0]) cond yes no)
+ // result: (BGTZ cond yes no)
+ for b.Controls[0].Op == OpRISCV64MOVDconst {
+ v_0 := b.Controls[0]
+ if auxIntToInt64(v_0.AuxInt) != 0 {
+ break
+ }
+ cond := b.Controls[1]
+ b.resetWithControl(BlockRISCV64BGTZ, cond)
+ return true
+ }
+ // match: (BLT cond (MOVDconst [0]) yes no)
+ // result: (BLTZ cond yes no)
+ for b.Controls[1].Op == OpRISCV64MOVDconst {
+ cond := b.Controls[0]
+ v_1 := b.Controls[1]
+ if auxIntToInt64(v_1.AuxInt) != 0 {
+ break
+ }
+ b.resetWithControl(BlockRISCV64BLTZ, cond)
+ return true
+ }
case BlockRISCV64BNE:
// match: (BNE (MOVDconst [0]) cond yes no)
// result: (BNEZ cond yes no)
@@ -6163,6 +6227,18 @@ func rewriteBlockRISCV64(b *Block) bool {
b.resetWithControl(BlockRISCV64BNEZ, x)
return true
}
+ // match: (BNEZ x:(NEG y) yes no)
+ // cond: x.Uses == 1
+ // result: (BNEZ y yes no)
+ for b.Controls[0].Op == OpRISCV64NEG {
+ x := b.Controls[0]
+ y := x.Args[0]
+ if !(x.Uses == 1) {
+ break
+ }
+ b.resetWithControl(BlockRISCV64BNEZ, y)
+ return true
+ }
// match: (BNEZ (SUB x y) yes no)
// result: (BNE x y yes no)
for b.Controls[0].Op == OpRISCV64SUB {
diff --git a/src/cmd/compile/internal/ssa/schedule.go b/src/cmd/compile/internal/ssa/schedule.go
index 4e3e5e75e3..c5130b2ee5 100644
--- a/src/cmd/compile/internal/ssa/schedule.go
+++ b/src/cmd/compile/internal/ssa/schedule.go
@@ -220,7 +220,7 @@ func schedule(f *Func) {
// unless they are phi values (which must be first).
// OpArg also goes first -- if it is stack it register allocates
// to a LoadReg, if it is register it is from the beginning anyway.
- if c.Op == OpPhi || c.Op == OpArg {
+ if score[c.ID] == ScorePhi || score[c.ID] == ScoreArg {
continue
}
score[c.ID] = ScoreControl
diff --git a/src/cmd/compile/internal/ssa/softfloat.go b/src/cmd/compile/internal/ssa/softfloat.go
index a8a8f83629..351f824a9f 100644
--- a/src/cmd/compile/internal/ssa/softfloat.go
+++ b/src/cmd/compile/internal/ssa/softfloat.go
@@ -63,6 +63,7 @@ func softfloat(f *Func) {
v.Aux = f.Config.Types.UInt32
case 8:
v.Aux = f.Config.Types.UInt64
+ newInt64 = true
default:
v.Fatalf("bad float type with size %d", size)
}
diff --git a/src/cmd/compile/internal/ssa/writebarrier.go b/src/cmd/compile/internal/ssa/writebarrier.go
index 419d91d0d3..d7510965f6 100644
--- a/src/cmd/compile/internal/ssa/writebarrier.go
+++ b/src/cmd/compile/internal/ssa/writebarrier.go
@@ -552,6 +552,9 @@ func IsStackAddr(v *Value) bool {
// IsGlobalAddr reports whether v is known to be an address of a global (or nil).
func IsGlobalAddr(v *Value) bool {
+ for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
+ v = v.Args[0]
+ }
if v.Op == OpAddr && v.Args[0].Op == OpSB {
return true // address of a global
}
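
An example of the store shape the extra unwrapping covers (assumed illustration): the stored value is an offset into a global, so no write barrier is needed for it.

package main

var arr [4]int
var p *int

func main() {
	// &arr[2] is an OpOffPtr/OpPtrIndex over OpAddr {arr} SB; with this change
	// IsGlobalAddr still reports true, so the store emits no write barrier.
	p = &arr[2]
}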
diff --git a/src/cmd/compile/internal/ssagen/abi.go b/src/cmd/compile/internal/ssagen/abi.go
index e460adaf95..6d8c53e722 100644
--- a/src/cmd/compile/internal/ssagen/abi.go
+++ b/src/cmd/compile/internal/ssagen/abi.go
@@ -152,6 +152,9 @@ func (s *SymABIs) GenABIWrappers() {
// Apply definitions.
defABI, hasDefABI := s.defs[symName]
if hasDefABI {
+ if len(fn.Body) != 0 {
+ base.ErrorfAt(fn.Pos(), "%v defined in both Go and assembly", fn)
+ }
fn.ABI = defABI
}
diff --git a/src/cmd/compile/internal/ssagen/arch.go b/src/cmd/compile/internal/ssagen/arch.go
index 7215f42c05..483e45cad4 100644
--- a/src/cmd/compile/internal/ssagen/arch.go
+++ b/src/cmd/compile/internal/ssagen/arch.go
@@ -29,8 +29,7 @@ type ArchInfo struct {
// at function entry, and it is ok to clobber registers.
ZeroRange func(*objw.Progs, *obj.Prog, int64, int64, *uint32) *obj.Prog
- Ginsnop func(*objw.Progs) *obj.Prog
- Ginsnopdefer func(*objw.Progs) *obj.Prog // special ginsnop for deferreturn
+ Ginsnop func(*objw.Progs) *obj.Prog
// SSAMarkMoves marks any MOVXconst ops that need to avoid clobbering flags.
SSAMarkMoves func(*State, *ssa.Block)
@@ -42,10 +41,10 @@ type ArchInfo struct {
// for all values in the block before SSAGenBlock.
SSAGenBlock func(s *State, b, next *ssa.Block)
- // LoadRegResults emits instructions that loads register-assigned results
- // into registers. They are already in memory (PPARAMOUT nodes).
- // Used in open-coded defer return path.
- LoadRegResults func(s *State, f *ssa.Func)
+ // LoadRegResult emits instructions that load the register-assigned result
+ // at n+off (n is PPARAMOUT) into register reg. The result is already in
+ // memory. Used in open-coded defer return path.
+ LoadRegResult func(s *State, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog
// SpillArgReg emits instructions that spill reg to n+off.
SpillArgReg func(pp *objw.Progs, p *obj.Prog, f *ssa.Func, t *types.Type, reg int16, n *ir.Name, off int64) *obj.Prog
diff --git a/src/cmd/compile/internal/ssagen/pgen.go b/src/cmd/compile/internal/ssagen/pgen.go
index 62567535d7..93157bfa11 100644
--- a/src/cmd/compile/internal/ssagen/pgen.go
+++ b/src/cmd/compile/internal/ssagen/pgen.go
@@ -114,7 +114,10 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
}
}
- sort.Sort(byStackVar(fn.Dcl))
+ // Use sort.Stable instead of sort.Sort so stack layout (and thus
+ // compiler output) is less sensitive to frontend changes that
+ // introduce or remove unused variables.
+ sort.Stable(byStackVar(fn.Dcl))
// Reassign stack offsets of the locals that are used.
lastHasPtr := false
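
A small illustration of why sort.Stable helps here (generic example, not compiler code): entries whose sort keys tie keep their original order, so introducing an unrelated variable does not reshuffle the rest of the frame.

package main

import (
	"fmt"
	"sort"
)

func main() {
	type slot struct {
		name string
		size int64
	}
	dcl := []slot{{"a", 8}, {"b", 8}, {"c", 16}, {"d", 8}}
	// Stable sort by descending size: a, b, d tie and keep declaration order.
	sort.SliceStable(dcl, func(i, j int) bool { return dcl[i].size > dcl[j].size })
	fmt.Println(dcl) // [{c 16} {a 8} {b 8} {d 8}]
}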
diff --git a/src/cmd/compile/internal/ssagen/ssa.go b/src/cmd/compile/internal/ssagen/ssa.go
index dfa76006de..39d3b206ac 100644
--- a/src/cmd/compile/internal/ssagen/ssa.go
+++ b/src/cmd/compile/internal/ssagen/ssa.go
@@ -87,8 +87,7 @@ func InitConfig() {
_ = types.NewPtr(types.Types[types.TINT64]) // *int64
_ = types.NewPtr(types.ErrorType) // *error
types.NewPtrCacheEnabled = false
- ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0)
- ssaConfig.SoftFloat = Arch.SoftFloat
+ ssaConfig = ssa.NewConfig(base.Ctxt.Arch.Name, *types_, base.Ctxt, base.Flag.N == 0, Arch.SoftFloat)
ssaConfig.Race = base.Flag.Race
ssaCaches = make([]ssa.Cache, base.Flag.LowerC)
@@ -279,18 +278,6 @@ func regAbiForFuncType(ft *types.Func) bool {
return np > 0 && strings.Contains(ft.Params.FieldType(np-1).String(), magicLastTypeName)
}
-// getParam returns the Field of ith param of node n (which is a
-// function/method/interface call), where the receiver of a method call is
-// considered as the 0th parameter. This does not include the receiver of an
-// interface call.
-func getParam(n *ir.CallExpr, i int) *types.Field {
- t := n.X.Type()
- if n.Op() == ir.OCALLMETH {
- base.Fatalf("OCALLMETH missed by walkCall")
- }
- return t.Params().Field(i)
-}
-
// dvarint writes a varint v to the funcdata in symbol x and returns the new offset
func dvarint(x *obj.LSym, off int, v int64) int {
if v < 0 || v > 1e9 {
@@ -324,66 +311,21 @@ func dvarint(x *obj.LSym, off int, v int64) int {
// for stack variables are specified as the number of bytes below varp (pointer to the
// top of the local variables) for their starting address. The format is:
//
-// - Max total argument size among all the defers
// - Offset of the deferBits variable
// - Number of defers in the function
// - Information about each defer call, in reverse order of appearance in the function:
-// - Total argument size of the call
// - Offset of the closure value to call
-// - Number of arguments (including interface receiver or method receiver as first arg)
-// - Information about each argument
-// - Offset of the stored defer argument in this function's frame
-// - Size of the argument
-// - Offset of where argument should be placed in the args frame when making call
func (s *state) emitOpenDeferInfo() {
x := base.Ctxt.Lookup(s.curfn.LSym.Name + ".opendefer")
s.curfn.LSym.Func().OpenCodedDeferInfo = x
off := 0
-
- // Compute maxargsize (max size of arguments for all defers)
- // first, so we can output it first to the funcdata
- var maxargsize int64
- for i := len(s.openDefers) - 1; i >= 0; i-- {
- r := s.openDefers[i]
- argsize := r.n.X.Type().ArgWidth() // TODO register args: but maybe use of abi0 will make this easy
- if argsize > maxargsize {
- maxargsize = argsize
- }
- }
- off = dvarint(x, off, maxargsize)
off = dvarint(x, off, -s.deferBitsTemp.FrameOffset())
off = dvarint(x, off, int64(len(s.openDefers)))
// Write in reverse-order, for ease of running in that order at runtime
for i := len(s.openDefers) - 1; i >= 0; i-- {
r := s.openDefers[i]
- off = dvarint(x, off, r.n.X.Type().ArgWidth())
off = dvarint(x, off, -r.closureNode.FrameOffset())
- numArgs := len(r.argNodes)
- if r.rcvrNode != nil {
- // If there's an interface receiver, treat/place it as the first
- // arg. (If there is a method receiver, it's already included as
- // first arg in r.argNodes.)
- numArgs++
- }
- off = dvarint(x, off, int64(numArgs))
- argAdjust := 0 // presence of receiver offsets the parameter count.
- if r.rcvrNode != nil {
- off = dvarint(x, off, -okOffset(r.rcvrNode.FrameOffset()))
- off = dvarint(x, off, s.config.PtrSize)
- off = dvarint(x, off, 0) // This is okay because defer records use ABI0 (for now)
- argAdjust++
- }
-
- // TODO(register args) assume abi0 for this?
- ab := s.f.ABI0
- pri := ab.ABIAnalyzeFuncType(r.n.X.Type().FuncType())
- for j, arg := range r.argNodes {
- f := getParam(r.n, j)
- off = dvarint(x, off, -okOffset(arg.FrameOffset()))
- off = dvarint(x, off, f.Type.Size())
- off = dvarint(x, off, okOffset(pri.InParam(j+argAdjust).FrameOffset(pri)))
- }
}
}
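
After this change the open-coded defer funcdata is just varints for the deferBits offset, the defer count, and one closure-slot offset per defer (in reverse order of appearance). A hypothetical reader, for illustration only:

package main

import "fmt"

// decodeOpenDeferInfo is a made-up helper showing the record layout the loop
// above writes; the real decoding happens in the runtime on raw varints.
func decodeOpenDeferInfo(rec []int64) {
	deferBitsOff, n := rec[0], rec[1]
	fmt.Println("deferBits stored at varp-", deferBitsOff, "defers:", n)
	for i := int64(0); i < n; i++ {
		// Entries are emitted in reverse order of appearance in the function.
		fmt.Println("defer", n-1-i, "closure slot at varp-", rec[2+i])
	}
}

func main() {
	// deferBits at varp-8; two defers with closures at varp-24 and varp-16.
	decodeOpenDeferInfo([]int64{8, 2, 24, 16})
}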
@@ -580,7 +522,7 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func {
}
// Populate closure variables.
- if !fn.ClosureCalled() {
+ if fn.Needctxt() {
clo := s.entryNewValue0(ssa.OpGetClosurePtr, s.f.Config.Types.BytePtr)
offset := int64(types.PtrSize) // PtrSize to skip past function entry PC field
for _, n := range fn.ClosureVars {
@@ -650,7 +592,6 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func {
// it mimics the behavior of the former ABI (everything stored) and because it's not 100%
// clear if naming conventions are respected in autogenerated code.
// TODO figure out exactly what's unused, don't spill it. Make liveness fine-grained, also.
- // TODO non-amd64 architectures have link registers etc that may require adjustment here.
for _, p := range params.InParams() {
typs, offs := p.RegisterTypesAndOffsets()
for i, t := range typs {
@@ -865,16 +806,6 @@ type openDeferInfo struct {
// function, method, or interface call, to store a closure that panic
// processing can use for this defer.
closureNode *ir.Name
- // If defer call is interface call, the address of the argtmp where the
- // receiver is stored
- rcvr *ssa.Value
- // The node representing the argtmp where the receiver is stored
- rcvrNode *ir.Name
- // The addresses of the argtmps where the evaluated arguments of the defer
- // function call are stored.
- argVals []*ssa.Value
- // The nodes representing the argtmps where the args of the defer are stored
- argNodes []*ir.Name
}
type state struct {
@@ -1491,7 +1422,12 @@ func (s *state) stmt(n ir.Node) {
case ir.OAS2DOTTYPE:
n := n.(*ir.AssignListStmt)
- res, resok := s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
+ var res, resok *ssa.Value
+ if n.Rhs[0].Op() == ir.ODOTTYPE2 {
+ res, resok = s.dottype(n.Rhs[0].(*ir.TypeAssertExpr), true)
+ } else {
+ res, resok = s.dynamicDottype(n.Rhs[0].(*ir.DynamicTypeAssertExpr), true)
+ }
deref := false
if !TypeOK(n.Rhs[0].Type()) {
if res.Op != ssa.OpLoad {
@@ -2748,6 +2684,11 @@ func (s *state) expr(n ir.Node) *ssa.Value {
res, _ := s.dottype(n, false)
return res
+ case ir.ODYNAMICDOTTYPE:
+ n := n.(*ir.DynamicTypeAssertExpr)
+ res, _ := s.dynamicDottype(n, false)
+ return res
+
// binary ops
case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
n := n.(*ir.BinaryExpr)
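
The new ODYNAMICDOTTYPE path handles type assertions whose target type is only known through a dictionary entry, as in a generic function asserting to its own type parameter (sketch):

package main

import "fmt"

func as[T any](x any) (T, bool) {
	// The target type T comes from the dictionary at run time, so under
	// stenciling this assertion lowers to the dynamic assert ops handled above.
	v, ok := x.(T)
	return v, ok
}

func main() {
	fmt.Println(as[int](42))
	fmt.Println(as[string](42))
}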
@@ -3183,7 +3124,7 @@ func (s *state) expr(n ir.Node) *ssa.Value {
}
fallthrough
- case ir.OCALLINTER, ir.OCALLMETH:
+ case ir.OCALLINTER:
n := n.(*ir.CallExpr)
return s.callResult(n, callNormal)
@@ -3191,6 +3132,14 @@ func (s *state) expr(n ir.Node) *ssa.Value {
n := n.(*ir.CallExpr)
return s.newValue1(ssa.OpGetG, n.Type(), s.mem())
+ case ir.OGETCALLERPC:
+ n := n.(*ir.CallExpr)
+ return s.newValue0(ssa.OpGetCallerPC, n.Type())
+
+ case ir.OGETCALLERSP:
+ n := n.(*ir.CallExpr)
+ return s.newValue0(ssa.OpGetCallerSP, n.Type())
+
case ir.OAPPEND:
return s.append(n.(*ir.CallExpr), false)
@@ -3703,6 +3652,16 @@ func softfloatInit() {
// TODO: do not emit sfcall if operation can be optimized to constant in later
// opt phase
func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
+ f2i := func(t *types.Type) *types.Type {
+ switch t.Kind() {
+ case types.TFLOAT32:
+ return types.Types[types.TUINT32]
+ case types.TFLOAT64:
+ return types.Types[types.TUINT64]
+ }
+ return t
+ }
+
if callDef, ok := softFloatOps[op]; ok {
switch op {
case ssa.OpLess32F,
@@ -3715,7 +3674,19 @@ func (s *state) sfcall(op ssa.Op, args ...*ssa.Value) (*ssa.Value, bool) {
args[1] = s.newValue1(s.ssaOp(ir.ONEG, types.Types[callDef.rtype]), args[1].Type, args[1])
}
- result := s.rtcall(callDef.rtfn, true, []*types.Type{types.Types[callDef.rtype]}, args...)[0]
+ // runtime functions take uints for floats and return uints.
+ // Convert to uints so we use the right calling convention.
+ for i, a := range args {
+ if a.Type.IsFloat() {
+ args[i] = s.newValue1(ssa.OpCopy, f2i(a.Type), a)
+ }
+ }
+
+ rt := types.Types[callDef.rtype]
+ result := s.rtcall(callDef.rtfn, true, []*types.Type{f2i(rt)}, args...)[0]
+ if rt.IsFloat() {
+ result = s.newValue1(ssa.OpCopy, rt, result)
+ }
if op == ssa.OpNeq32F || op == ssa.OpNeq64F {
result = s.newValue1(ssa.OpNot, result.Type, result)
}
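
The conversion sfcall now performs, seen at the source level (sketch): soft-float helpers take and return raw bit patterns, so float operands are passed as same-width unsigned integers and the result is reinterpreted back.

package main

import (
	"fmt"
	"math"
)

func main() {
	f := 3.5
	u := math.Float64bits(f)          // what the runtime helper actually receives
	g := math.Float64frombits(u)      // what the caller reinterprets on return
	fmt.Printf("%#x %v\n", u, g == f) // 0x400c000000000000 true
}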
@@ -3808,7 +3779,7 @@ func InitTables() {
}
return s.newValue2(ssa.OpMul64uover, types.NewTuple(types.Types[types.TUINT], types.Types[types.TUINT]), args[0], args[1])
},
- sys.AMD64, sys.I386, sys.MIPS64)
+ sys.AMD64, sys.I386, sys.MIPS64, sys.RISCV64)
add("runtime", "KeepAlive",
func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
data := s.newValue1(ssa.OpIData, s.f.Config.Types.BytePtr, args[0])
@@ -4534,9 +4505,9 @@ func InitTables() {
func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
return s.newValue2(ssa.OpMul64uhilo, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1])
},
- sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X, sys.MIPS64)
- alias("math/bits", "Mul", "math/bits", "Mul64", sys.ArchAMD64, sys.ArchARM64, sys.ArchPPC64, sys.ArchPPC64LE, sys.ArchS390X, sys.ArchMIPS64, sys.ArchMIPS64LE)
- alias("runtime/internal/math", "Mul64", "math/bits", "Mul64", sys.ArchAMD64, sys.ArchARM64, sys.ArchPPC64, sys.ArchPPC64LE, sys.ArchS390X, sys.ArchMIPS64, sys.ArchMIPS64LE)
+ sys.AMD64, sys.ARM64, sys.PPC64, sys.S390X, sys.MIPS64, sys.RISCV64)
+ alias("math/bits", "Mul", "math/bits", "Mul64", sys.ArchAMD64, sys.ArchARM64, sys.ArchPPC64, sys.ArchPPC64LE, sys.ArchS390X, sys.ArchMIPS64, sys.ArchMIPS64LE, sys.ArchRISCV64)
+ alias("runtime/internal/math", "Mul64", "math/bits", "Mul64", sys.ArchAMD64, sys.ArchARM64, sys.ArchPPC64, sys.ArchPPC64LE, sys.ArchS390X, sys.ArchMIPS64, sys.ArchMIPS64LE, sys.ArchRISCV64)
addF("math/bits", "Add64",
func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
return s.newValue3(ssa.OpAdd64carry, types.NewTuple(types.Types[types.TUINT64], types.Types[types.TUINT64]), args[0], args[1], args[2])
@@ -4687,17 +4658,14 @@ func (s *state) intrinsicArgs(n *ir.CallExpr) []*ssa.Value {
return args
}
-// openDeferRecord adds code to evaluate and store the args for an open-code defer
+// openDeferRecord adds code to evaluate and store the function for an open-code defer
// call, and records info about the defer, so we can generate proper code on the
// exit paths. n is the sub-node of the defer node that is the actual function
-// call. We will also record funcdata information on where the args are stored
+// call. We will also record funcdata information on where the function is stored
// (as well as the deferBits variable), and this will enable us to run the proper
// defer calls during panics.
func (s *state) openDeferRecord(n *ir.CallExpr) {
- var args []*ssa.Value
- var argNodes []*ir.Name
-
- if buildcfg.Experiment.RegabiDefer && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.X.Type().NumResults() != 0) {
+ if len(n.Args) != 0 || n.Op() != ir.OCALLFUNC || n.X.Type().NumResults() != 0 {
s.Fatalf("defer call with arguments or results: %v", n)
}
@@ -4705,48 +4673,20 @@ func (s *state) openDeferRecord(n *ir.CallExpr) {
n: n,
}
fn := n.X
- if n.Op() == ir.OCALLFUNC {
- // We must always store the function value in a stack slot for the
- // runtime panic code to use. But in the defer exit code, we will
- // call the function directly if it is a static function.
- closureVal := s.expr(fn)
- closure := s.openDeferSave(nil, fn.Type(), closureVal)
- opendefer.closureNode = closure.Aux.(*ir.Name)
- if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
- opendefer.closure = closure
- }
- } else if n.Op() == ir.OCALLMETH {
- base.Fatalf("OCALLMETH missed by walkCall")
- } else {
- if fn.Op() != ir.ODOTINTER {
- base.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
- }
- fn := fn.(*ir.SelectorExpr)
- closure, rcvr := s.getClosureAndRcvr(fn)
- opendefer.closure = s.openDeferSave(nil, closure.Type, closure)
- // Important to get the receiver type correct, so it is recognized
- // as a pointer for GC purposes.
- opendefer.rcvr = s.openDeferSave(nil, fn.Type().Recv().Type, rcvr)
- opendefer.closureNode = opendefer.closure.Aux.(*ir.Name)
- opendefer.rcvrNode = opendefer.rcvr.Aux.(*ir.Name)
- }
- for _, argn := range n.Args {
- var v *ssa.Value
- if TypeOK(argn.Type()) {
- v = s.openDeferSave(nil, argn.Type(), s.expr(argn))
- } else {
- v = s.openDeferSave(argn, argn.Type(), nil)
- }
- args = append(args, v)
- argNodes = append(argNodes, v.Aux.(*ir.Name))
+ // We must always store the function value in a stack slot for the
+ // runtime panic code to use. But in the defer exit code, we will
+ // call the function directly if it is a static function.
+ closureVal := s.expr(fn)
+ closure := s.openDeferSave(fn.Type(), closureVal)
+ opendefer.closureNode = closure.Aux.(*ir.Name)
+ if !(fn.Op() == ir.ONAME && fn.(*ir.Name).Class == ir.PFUNC) {
+ opendefer.closure = closure
}
- opendefer.argVals = args
- opendefer.argNodes = argNodes
index := len(s.openDefers)
s.openDefers = append(s.openDefers, opendefer)
// Update deferBits only after evaluation and storage to stack of
- // args/receiver/interface is successful.
+ // the function is successful.
bitvalue := s.constInt8(types.Types[types.TUINT8], 1<<uint(index))
newDeferBits := s.newValue2(ssa.OpOr8, types.Types[types.TUINT8], s.variable(deferBitsVar, types.Types[types.TUINT8]), bitvalue)
s.vars[deferBitsVar] = newDeferBits
@@ -4755,57 +4695,47 @@ func (s *state) openDeferRecord(n *ir.CallExpr) {
// openDeferSave generates SSA nodes to store a value (with type t) for an
// open-coded defer at an explicit autotmp location on the stack, so it can be
-// reloaded and used for the appropriate call on exit. If type t is SSAable, then
-// val must be non-nil (and n should be nil) and val is the value to be stored. If
-// type t is non-SSAable, then n must be non-nil (and val should be nil) and n is
-// evaluated (via s.addr() below) to get the value that is to be stored. The
-// function returns an SSA value representing a pointer to the autotmp location.
-func (s *state) openDeferSave(n ir.Node, t *types.Type, val *ssa.Value) *ssa.Value {
- canSSA := TypeOK(t)
- var pos src.XPos
- if canSSA {
- pos = val.Pos
- } else {
- pos = n.Pos()
+// reloaded and used for the appropriate call on exit. Type t must be a function type
+// (therefore SSAable). val is the value to be stored. The function returns an SSA
+// value representing a pointer to the autotmp location.
+func (s *state) openDeferSave(t *types.Type, val *ssa.Value) *ssa.Value {
+ if !TypeOK(t) {
+ s.Fatalf("openDeferSave of non-SSA-able type %v val=%v", t, val)
}
- argTemp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
- argTemp.SetOpenDeferSlot(true)
- var addrArgTemp *ssa.Value
- // Use OpVarLive to make sure stack slots for the args, etc. are not
- // removed by dead-store elimination
+ if !t.HasPointers() {
+ s.Fatalf("openDeferSave of pointerless type %v val=%v", t, val)
+ }
+ pos := val.Pos
+ temp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
+ temp.SetOpenDeferSlot(true)
+ var addrTemp *ssa.Value
+ // Use OpVarLive to make sure stack slot for the closure is not removed by
+ // dead-store elimination
if s.curBlock.ID != s.f.Entry.ID {
- // Force the argtmp storing this defer function/receiver/arg to be
- // declared in the entry block, so that it will be live for the
- // defer exit code (which will actually access it only if the
- // associated defer call has been activated).
- s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, argTemp, s.defvars[s.f.Entry.ID][memVar])
- s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, argTemp, s.defvars[s.f.Entry.ID][memVar])
- addrArgTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(argTemp.Type()), argTemp, s.sp, s.defvars[s.f.Entry.ID][memVar])
+ // Force the tmp storing this defer function to be declared in the entry
+ // block, so that it will be live for the defer exit code (which will
+ // actually access it only if the associated defer call has been activated).
+ s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarDef, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
+ s.defvars[s.f.Entry.ID][memVar] = s.f.Entry.NewValue1A(src.NoXPos, ssa.OpVarLive, types.TypeMem, temp, s.defvars[s.f.Entry.ID][memVar])
+ addrTemp = s.f.Entry.NewValue2A(src.NoXPos, ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.defvars[s.f.Entry.ID][memVar])
} else {
// Special case if we're still in the entry block. We can't use
// the above code, since s.defvars[s.f.Entry.ID] isn't defined
// until we end the entry block with s.endBlock().
- s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, argTemp, s.mem(), false)
- s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, argTemp, s.mem(), false)
- addrArgTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(argTemp.Type()), argTemp, s.sp, s.mem(), false)
- }
- if t.HasPointers() {
- // Since we may use this argTemp during exit depending on the
- // deferBits, we must define it unconditionally on entry.
- // Therefore, we must make sure it is zeroed out in the entry
- // block if it contains pointers, else GC may wrongly follow an
- // uninitialized pointer value.
- argTemp.SetNeedzero(true)
- }
- if !canSSA {
- a := s.addr(n)
- s.move(t, addrArgTemp, a)
- return addrArgTemp
- }
+ s.vars[memVar] = s.newValue1Apos(ssa.OpVarDef, types.TypeMem, temp, s.mem(), false)
+ s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, temp, s.mem(), false)
+ addrTemp = s.newValue2Apos(ssa.OpLocalAddr, types.NewPtr(temp.Type()), temp, s.sp, s.mem(), false)
+ }
+ // Since we may use this temp during exit depending on the
+ // deferBits, we must define it unconditionally on entry.
+ // Therefore, we must make sure it is zeroed out in the entry
+ // block if it contains pointers, else GC may wrongly follow an
+ // uninitialized pointer value.
+ temp.SetNeedzero(true)
// We are storing to the stack, hence we can avoid the full checks in
// storeType() (no write barrier) and do a simple store().
- s.store(t, addrArgTemp, val)
- return addrArgTemp
+ s.store(t, addrTemp, val)
+ return addrTemp
}
// openDeferExit generates SSA for processing all the open coded defers at exit.
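For reference, openDeferRecord/openDeferSave above now store only the deferred function value and set one bit in deferBits; openDeferExit then invokes, in reverse order, the stored closures whose bit is still set. A user-level sketch of that bookkeeping (hypothetical names, illustrative only):

package main

import "fmt"

func main() {
    var deferBits uint8 // one bit per recorded defer
    var fns [8]func()   // stored function values ("closure slots")

    record := func(i int, f func()) {
        fns[i] = f
        deferBits |= 1 << uint(i) // mark this defer as active
    }

    record(0, func() { fmt.Println("defer 0") })
    record(1, func() { fmt.Println("defer 1") })

    // Exit path: run the active defers in reverse (LIFO) order.
    for i := len(fns) - 1; i >= 0; i-- {
        if deferBits&(1<<uint(i)) != 0 {
            fns[i]()
        }
    }
}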
@@ -4849,45 +4779,26 @@ func (s *state) openDeferExit() {
s.vars[deferBitsVar] = maskedval
// Generate code to call the function call of the defer, using the
- // closure/receiver/args that were stored in argtmps at the point
- // of the defer statement.
+ // closure that was stored in argtmps at the point of the defer
+ // statement.
fn := r.n.X
stksize := fn.Type().ArgWidth()
- var ACArgs []*types.Type
- var ACResults []*types.Type
var callArgs []*ssa.Value
- if r.rcvr != nil {
- // rcvr in case of OCALLINTER
- v := s.load(r.rcvr.Type.Elem(), r.rcvr)
- ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
- callArgs = append(callArgs, v)
- }
- for j, argAddrVal := range r.argVals {
- f := getParam(r.n, j)
- ACArgs = append(ACArgs, f.Type)
- var a *ssa.Value
- if !TypeOK(f.Type) {
- a = s.newValue2(ssa.OpDereference, f.Type, argAddrVal, s.mem())
- } else {
- a = s.load(f.Type, argAddrVal)
- }
- callArgs = append(callArgs, a)
- }
var call *ssa.Value
if r.closure != nil {
v := s.load(r.closure.Type.Elem(), r.closure)
s.maybeNilCheckClosure(v, callDefer)
codeptr := s.rawLoad(types.Types[types.TUINTPTR], v)
- aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, ACArgs, ACResults))
+ aux := ssa.ClosureAuxCall(s.f.ABIDefault.ABIAnalyzeTypes(nil, nil, nil))
call = s.newValue2A(ssa.OpClosureLECall, aux.LateExpansionResultType(), aux, codeptr, v)
} else {
- aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, ACArgs, ACResults))
+ aux := ssa.StaticAuxCall(fn.(*ir.Name).Linksym(), s.f.ABIDefault.ABIAnalyzeTypes(nil, nil, nil))
call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
}
callArgs = append(callArgs, s.mem())
call.AddArgs(callArgs...)
call.AuxInt = stksize
- s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
+ s.vars[memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, 0, call)
// Make sure that the stack slots with pointers are kept live
// through the call (which is a pre-emption point). Also, we will
// use the first call of the last defer exit to compute liveness
@@ -4895,16 +4806,6 @@ func (s *state) openDeferExit() {
if r.closureNode != nil {
s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.closureNode, s.mem(), false)
}
- if r.rcvrNode != nil {
- if r.rcvrNode.Type().HasPointers() {
- s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, r.rcvrNode, s.mem(), false)
- }
- }
- for _, argNode := range r.argNodes {
- if argNode.Type().HasPointers() {
- s.vars[memVar] = s.newValue1Apos(ssa.OpVarLive, types.TypeMem, argNode, s.mem(), false)
- }
- }
s.endBlock()
s.startBlock(bEnd)
@@ -4952,7 +4853,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
}
}
- if buildcfg.Experiment.RegabiDefer && k != callNormal && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.X.Type().NumResults() != 0) {
+ if k != callNormal && (len(n.Args) != 0 || n.Op() == ir.OCALLINTER || n.X.Type().NumResults() != 0) {
s.Fatalf("go/defer call with arguments: %v", n)
}
@@ -4986,8 +4887,6 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
// not the point of defer statement.
s.maybeNilCheckClosure(closure, k)
}
- case ir.OCALLMETH:
- base.Fatalf("OCALLMETH missed by walkCall")
case ir.OCALLINTER:
if fn.Op() != ir.ODOTINTER {
s.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op())
@@ -5023,55 +4922,31 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
var call *ssa.Value
if k == callDeferStack {
// Make a defer struct d on the stack.
- t := deferstruct(stksize)
+ if stksize != 0 {
+ s.Fatalf("deferprocStack with non-zero stack size %d: %v", stksize, n)
+ }
+
+ t := deferstruct()
d := typecheck.TempAt(n.Pos(), s.curfn, t)
s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, d, s.mem())
addr := s.addr(d)
- // Must match reflect.go:deferstruct and src/runtime/runtime2.go:_defer.
- // 0: siz
- s.store(types.Types[types.TUINT32],
- s.newValue1I(ssa.OpOffPtr, types.Types[types.TUINT32].PtrTo(), t.FieldOff(0), addr),
- s.constInt32(types.Types[types.TUINT32], int32(stksize)))
- // 1: started, set in deferprocStack
- // 2: heap, set in deferprocStack
- // 3: openDefer
- // 4: sp, set in deferprocStack
- // 5: pc, set in deferprocStack
- // 6: fn
+ // Must match deferstruct() below and src/runtime/runtime2.go:_defer.
+ // 0: started, set in deferprocStack
+ // 1: heap, set in deferprocStack
+ // 2: openDefer
+ // 3: sp, set in deferprocStack
+ // 4: pc, set in deferprocStack
+ // 5: fn
s.store(closure.Type,
- s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(6), addr),
+ s.newValue1I(ssa.OpOffPtr, closure.Type.PtrTo(), t.FieldOff(5), addr),
closure)
- // 7: panic, set in deferprocStack
- // 8: link, set in deferprocStack
- // 9: framepc
- // 10: varp
- // 11: fd
-
- // Then, store all the arguments of the defer call.
- ft := fn.Type()
- off := t.FieldOff(12) // TODO register args: be sure this isn't a hardcoded param stack offset.
- args := n.Args
- i0 := 0
-
- // Set receiver (for interface calls). Always a pointer.
- if rcvr != nil {
- p := s.newValue1I(ssa.OpOffPtr, ft.Recv().Type.PtrTo(), off, addr)
- s.store(types.Types[types.TUINTPTR], p, rcvr)
- i0 = 1
- }
- // Set receiver (for method calls).
- if n.Op() == ir.OCALLMETH {
- base.Fatalf("OCALLMETH missed by walkCall")
- }
- // Set other args.
- // This code is only used when RegabiDefer is not enabled, and arguments are always
- // passed on stack.
- for i, f := range ft.Params().Fields().Slice() {
- s.storeArgWithBase(args[0], f.Type, addr, off+params.InParam(i+i0).FrameOffset(params))
- args = args[1:]
- }
+ // 6: panic, set in deferprocStack
+ // 7: link, set in deferprocStack
+ // 8: fd
+ // 9: varp
+ // 10: framepc
// Call runtime.deferprocStack with pointer to _defer record.
ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
@@ -5079,26 +4954,18 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
callArgs = append(callArgs, addr, s.mem())
call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux)
call.AddArgs(callArgs...)
- if stksize < int64(types.PtrSize) {
- // We need room for both the call to deferprocStack and the call to
- // the deferred function.
- stksize = int64(types.PtrSize)
- }
- call.AuxInt = stksize
+ call.AuxInt = int64(types.PtrSize) // deferprocStack takes a *_defer arg
} else {
// Store arguments to stack, including defer/go arguments and receiver for method calls.
// These are written in SP-offset order.
argStart := base.Ctxt.FixedFrameSize()
// Defer/go args.
if k != callNormal {
- // Write argsize and closure (args to newproc/deferproc).
- argsize := s.constInt32(types.Types[types.TUINT32], int32(stksize))
- ACArgs = append(ACArgs, types.Types[types.TUINT32]) // not argExtra
- callArgs = append(callArgs, argsize)
- ACArgs = append(ACArgs, types.Types[types.TUINTPTR])
+ // Write closure (arg to newproc/deferproc).
+ ACArgs = append(ACArgs, types.Types[types.TUINTPTR]) // not argExtra
callArgs = append(callArgs, closure)
- stksize += 2 * int64(types.PtrSize)
- argStart += 2 * int64(types.PtrSize)
+ stksize += int64(types.PtrSize)
+ argStart += int64(types.PtrSize)
}
// Set receiver (for interface calls).
@@ -5109,9 +4976,6 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
// Write args.
t := n.X.Type()
args := n.Args
- if n.Op() == ir.OCALLMETH {
- base.Fatalf("OCALLMETH missed by walkCall")
- }
for _, p := range params.InParams() { // includes receiver for interface calls
ACArgs = append(ACArgs, p.Type)
@@ -5314,9 +5178,13 @@ func (s *state) addr(n ir.Node) *ssa.Value {
case ir.OCALLFUNC, ir.OCALLINTER:
n := n.(*ir.CallExpr)
return s.callAddr(n, callNormal)
- case ir.ODOTTYPE:
- n := n.(*ir.TypeAssertExpr)
- v, _ := s.dottype(n, false)
+ case ir.ODOTTYPE, ir.ODYNAMICDOTTYPE:
+ var v *ssa.Value
+ if n.Op() == ir.ODOTTYPE {
+ v, _ = s.dottype(n.(*ir.TypeAssertExpr), false)
+ } else {
+ v, _ = s.dynamicDottype(n.(*ir.DynamicTypeAssertExpr), false)
+ }
if v.Op != ssa.OpLoad {
s.Fatalf("dottype of non-load")
}
@@ -6210,14 +6078,38 @@ func (s *state) floatToUint(cvttab *f2uCvtTab, n ir.Node, x *ssa.Value, ft, tt *
func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
iface := s.expr(n.X) // input interface
target := s.reflectType(n.Type()) // target type
- byteptr := s.f.Config.Types.BytePtr
+ var targetItab *ssa.Value
+ if n.Itab != nil {
+ targetItab = s.expr(n.Itab)
+ }
+ return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, target, targetItab, commaok)
+}
+
+func (s *state) dynamicDottype(n *ir.DynamicTypeAssertExpr, commaok bool) (res, resok *ssa.Value) {
+ iface := s.expr(n.X)
+ target := s.expr(n.T)
+ var itab *ssa.Value
+ if !n.X.Type().IsEmptyInterface() && !n.Type().IsInterface() {
+ byteptr := s.f.Config.Types.BytePtr
+ itab = target
+ target = s.load(byteptr, s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), itab)) // itab.typ
+ }
+ return s.dottype1(n.Pos(), n.X.Type(), n.Type(), iface, target, itab, commaok)
+}
- if n.Type().IsInterface() {
- if n.Type().IsEmptyInterface() {
+// dottype1 implements an x.(T) operation. iface is the argument (x), dst is the type we're asserting to (T)
+// and src is the type we're asserting from.
+// target is the *runtime._type of dst.
+// If src is a nonempty interface and dst is not an interface, targetItab is an itab representing (dst, src). Otherwise it is nil.
+// commaok is true if the caller wants a boolean success value. Otherwise, the generated code panics if the conversion fails.
+func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, target, targetItab *ssa.Value, commaok bool) (res, resok *ssa.Value) {
+ byteptr := s.f.Config.Types.BytePtr
+ if dst.IsInterface() {
+ if dst.IsEmptyInterface() {
// Converting to an empty interface.
// Input could be an empty or nonempty interface.
if base.Debug.TypeAssert > 0 {
- base.WarnfAt(n.Pos(), "type assertion inlined")
+ base.WarnfAt(pos, "type assertion inlined")
}
// Get itab/type field from input.
@@ -6225,7 +6117,7 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
// Conversion succeeds iff that field is not nil.
cond := s.newValue2(ssa.OpNeqPtr, types.Types[types.TBOOL], itab, s.constNil(byteptr))
- if n.X.Type().IsEmptyInterface() && commaok {
+ if src.IsEmptyInterface() && commaok {
// Converting empty interface to empty interface with ,ok is just a nil check.
return iface, cond
}
@@ -6247,7 +6139,7 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
// On success, return (perhaps modified) input interface.
s.startBlock(bOk)
- if n.X.Type().IsEmptyInterface() {
+ if src.IsEmptyInterface() {
res = iface // Use input interface unchanged.
return
}
@@ -6255,7 +6147,7 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
off := s.newValue1I(ssa.OpOffPtr, byteptr, int64(types.PtrSize), itab)
typ := s.load(byteptr, off)
idata := s.newValue1(ssa.OpIData, byteptr, iface)
- res = s.newValue2(ssa.OpIMake, n.Type(), typ, idata)
+ res = s.newValue2(ssa.OpIMake, dst, typ, idata)
return
}
@@ -6277,62 +6169,62 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
bFail.AddEdgeTo(bEnd)
s.startBlock(bEnd)
idata := s.newValue1(ssa.OpIData, byteptr, iface)
- res = s.newValue2(ssa.OpIMake, n.Type(), s.variable(typVar, byteptr), idata)
+ res = s.newValue2(ssa.OpIMake, dst, s.variable(typVar, byteptr), idata)
resok = cond
delete(s.vars, typVar)
return
}
// converting to a nonempty interface needs a runtime call.
if base.Debug.TypeAssert > 0 {
- base.WarnfAt(n.Pos(), "type assertion not inlined")
+ base.WarnfAt(pos, "type assertion not inlined")
}
if !commaok {
fn := ir.Syms.AssertI2I
- if n.X.Type().IsEmptyInterface() {
+ if src.IsEmptyInterface() {
fn = ir.Syms.AssertE2I
}
data := s.newValue1(ssa.OpIData, types.Types[types.TUNSAFEPTR], iface)
tab := s.newValue1(ssa.OpITab, byteptr, iface)
tab = s.rtcall(fn, true, []*types.Type{byteptr}, target, tab)[0]
- return s.newValue2(ssa.OpIMake, n.Type(), tab, data), nil
+ return s.newValue2(ssa.OpIMake, dst, tab, data), nil
}
fn := ir.Syms.AssertI2I2
- if n.X.Type().IsEmptyInterface() {
+ if src.IsEmptyInterface() {
fn = ir.Syms.AssertE2I2
}
- res = s.rtcall(fn, true, []*types.Type{n.Type()}, target, iface)[0]
- resok = s.newValue2(ssa.OpNeqInter, types.Types[types.TBOOL], res, s.constInterface(n.Type()))
+ res = s.rtcall(fn, true, []*types.Type{dst}, target, iface)[0]
+ resok = s.newValue2(ssa.OpNeqInter, types.Types[types.TBOOL], res, s.constInterface(dst))
return
}
if base.Debug.TypeAssert > 0 {
- base.WarnfAt(n.Pos(), "type assertion inlined")
+ base.WarnfAt(pos, "type assertion inlined")
}
// Converting to a concrete type.
- direct := types.IsDirectIface(n.Type())
+ direct := types.IsDirectIface(dst)
itab := s.newValue1(ssa.OpITab, byteptr, iface) // type word of interface
if base.Debug.TypeAssert > 0 {
- base.WarnfAt(n.Pos(), "type assertion inlined")
+ base.WarnfAt(pos, "type assertion inlined")
}
- var targetITab *ssa.Value
- if n.X.Type().IsEmptyInterface() {
+ var wantedFirstWord *ssa.Value
+ if src.IsEmptyInterface() {
// Looking for pointer to target type.
- targetITab = target
+ wantedFirstWord = target
} else {
// Looking for pointer to itab for target type and source interface.
- targetITab = s.expr(n.Itab)
+ wantedFirstWord = targetItab
}
var tmp ir.Node // temporary for use with large types
var addr *ssa.Value // address of tmp
- if commaok && !TypeOK(n.Type()) {
+ if commaok && !TypeOK(dst) {
// unSSAable type, use temporary.
// TODO: get rid of some of these temporaries.
- tmp, addr = s.temp(n.Pos(), n.Type())
+ tmp, addr = s.temp(pos, dst)
}
- cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, targetITab)
+ cond := s.newValue2(ssa.OpEqPtr, types.Types[types.TBOOL], itab, wantedFirstWord)
b := s.endBlock()
b.Kind = ssa.BlockIf
b.SetControl(cond)
@@ -6346,8 +6238,8 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
if !commaok {
// on failure, panic by calling panicdottype
s.startBlock(bFail)
- taddr := s.reflectType(n.X.Type())
- if n.X.Type().IsEmptyInterface() {
+ taddr := s.reflectType(src)
+ if src.IsEmptyInterface() {
s.rtcall(ir.Syms.PanicdottypeE, false, nil, itab, target, taddr)
} else {
s.rtcall(ir.Syms.PanicdottypeI, false, nil, itab, target, taddr)
@@ -6356,10 +6248,10 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
// on success, return data from interface
s.startBlock(bOk)
if direct {
- return s.newValue1(ssa.OpIData, n.Type(), iface), nil
+ return s.newValue1(ssa.OpIData, dst, iface), nil
}
- p := s.newValue1(ssa.OpIData, types.NewPtr(n.Type()), iface)
- return s.load(n.Type(), p), nil
+ p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
+ return s.load(dst, p), nil
}
// commaok is the more complicated case because we have
@@ -6373,14 +6265,14 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
s.startBlock(bOk)
if tmp == nil {
if direct {
- s.vars[valVar] = s.newValue1(ssa.OpIData, n.Type(), iface)
+ s.vars[valVar] = s.newValue1(ssa.OpIData, dst, iface)
} else {
- p := s.newValue1(ssa.OpIData, types.NewPtr(n.Type()), iface)
- s.vars[valVar] = s.load(n.Type(), p)
+ p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
+ s.vars[valVar] = s.load(dst, p)
}
} else {
- p := s.newValue1(ssa.OpIData, types.NewPtr(n.Type()), iface)
- s.move(n.Type(), addr, p)
+ p := s.newValue1(ssa.OpIData, types.NewPtr(dst), iface)
+ s.move(dst, addr, p)
}
s.vars[okVar] = s.constBool(true)
s.endBlock()
@@ -6389,9 +6281,9 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
// type assertion failed
s.startBlock(bFail)
if tmp == nil {
- s.vars[valVar] = s.zeroVal(n.Type())
+ s.vars[valVar] = s.zeroVal(dst)
} else {
- s.zero(n.Type(), addr)
+ s.zero(dst, addr)
}
s.vars[okVar] = s.constBool(false)
s.endBlock()
@@ -6400,10 +6292,10 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
// merge point
s.startBlock(bEnd)
if tmp == nil {
- res = s.variable(valVar, n.Type())
+ res = s.variable(valVar, dst)
delete(s.vars, valVar)
} else {
- res = s.load(n.Type(), addr)
+ res = s.load(dst, addr)
s.vars[memVar] = s.newValue1A(ssa.OpVarKill, types.TypeMem, tmp.(*ir.Name), s.mem())
}
resok = s.variable(okVar, types.Types[types.TBOOL])
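For reference, dottype1 above generates both flavors of type assertion: the single-result form, whose failure path calls panicdottypeE/panicdottypeI, and the comma-ok form, which yields a boolean instead of panicking. At the source level the two forms look like this (illustrative example):

package main

import "fmt"

func describe(x interface{}) {
    // Comma-ok form: failure sets ok to false instead of panicking.
    if s, ok := x.(string); ok {
        fmt.Println("string:", s)
        return
    }
    // Single-result form: failure would panic via the runtime's panicdottype helpers.
    n := x.(int)
    fmt.Println("int:", n)
}

func main() {
    describe("hello")
    describe(42)
}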
@@ -6921,8 +6813,12 @@ func genssa(f *ssa.Func, pp *objw.Progs) {
// recovers a panic, it will return to caller with right results.
// The results are already in memory, because they are not SSA'd
// when the function has defers (see canSSAName).
- if f.OwnAux.ABIInfo().OutRegistersUsed() != 0 {
- Arch.LoadRegResults(&s, f)
+ for _, o := range f.OwnAux.ABIInfo().OutParams() {
+ n := o.Name.(*ir.Name)
+ rts, offs := o.RegisterTypesAndOffsets()
+ for i := range o.Registers {
+ Arch.LoadRegResult(&s, f, rts[i], ssa.ObjRegForAbiReg(o.Registers[i], f.Config), n, offs[i])
+ }
}
pp.Prog(obj.ARET)
@@ -7460,18 +7356,6 @@ func (s *State) PrepareCall(v *ssa.Value) {
call, ok := v.Aux.(*ssa.AuxCall)
- if ok && call.Fn == ir.Syms.Deferreturn {
- // Deferred calls will appear to be returning to
- // the CALL deferreturn(SB) that we are about to emit.
- // However, the stack trace code will show the line
- // of the instruction byte before the return PC.
- // To avoid that being an unrelated instruction,
- // insert an actual hardware NOP that will have the right line number.
- // This is different from obj.ANOP, which is a virtual no-op
- // that doesn't make it into the instruction stream.
- Arch.Ginsnopdefer(s.pp)
- }
-
if ok {
// Record call graph information for nowritebarrierrec
// analysis.
@@ -7542,10 +7426,6 @@ func (e *ssafn) Auto(pos src.XPos, t *types.Type) *ir.Name {
return typecheck.TempAt(pos, e.curfn, t) // Note: adds new auto to e.curfn.Func.Dcl list
}
-func (e *ssafn) DerefItab(it *obj.LSym, offset int64) *obj.LSym {
- return reflectdata.ITabSym(it, offset)
-}
-
// SplitSlot returns a slot representing the data of parent starting at offset.
func (e *ssafn) SplitSlot(parent *ssa.LocalSlot, suffix string, offset int64, t *types.Type) ssa.LocalSlot {
node := parent.N
@@ -7676,9 +7556,8 @@ func max8(a, b int8) int8 {
return b
}
-// deferstruct makes a runtime._defer structure, with additional space for
-// stksize bytes of args.
-func deferstruct(stksize int64) *types.Type {
+// deferstruct makes a runtime._defer structure.
+func deferstruct() *types.Type {
makefield := func(name string, typ *types.Type) *types.Field {
// Unlike the global makefield function, this one needs to set Pkg
// because these types might be compared (in SSA CSE sorting).
@@ -7686,13 +7565,9 @@ func deferstruct(stksize int64) *types.Type {
sym := &types.Sym{Name: name, Pkg: types.LocalPkg}
return types.NewField(src.NoXPos, sym, typ)
}
- argtype := types.NewArray(types.Types[types.TUINT8], stksize)
- argtype.Width = stksize
- argtype.Align = 1
// These fields must match the ones in runtime/runtime2.go:_defer and
- // cmd/compile/internal/gc/ssa.go:(*state).call.
+ // (*state).call above.
fields := []*types.Field{
- makefield("siz", types.Types[types.TUINT32]),
makefield("started", types.Types[types.TBOOL]),
makefield("heap", types.Types[types.TBOOL]),
makefield("openDefer", types.Types[types.TBOOL]),
@@ -7704,10 +7579,9 @@ func deferstruct(stksize int64) *types.Type {
makefield("fn", types.Types[types.TUINTPTR]),
makefield("_panic", types.Types[types.TUINTPTR]),
makefield("link", types.Types[types.TUINTPTR]),
- makefield("framepc", types.Types[types.TUINTPTR]),
- makefield("varp", types.Types[types.TUINTPTR]),
makefield("fd", types.Types[types.TUINTPTR]),
- makefield("args", argtype),
+ makefield("varp", types.Types[types.TUINTPTR]),
+ makefield("framepc", types.Types[types.TUINTPTR]),
}
// build struct holding the above fields
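For reference, the field order here must match both runtime/runtime2.go:_defer and the by-offset stores in (*state).call above (t.FieldOff(5) selects the fn slot), because the generated code addresses fields by offset rather than by name. A toy illustration of offset-based layout (hypothetical struct, not the real _defer):

package main

import (
    "fmt"
    "unsafe"
)

// record mimics a header of small flag fields followed by pointer-sized slots.
type record struct {
    started bool
    heap    bool
    open    bool
    sp      uintptr
    pc      uintptr
    fn      uintptr
}

func main() {
    var r record
    // unsafe.Offsetof plays the same role here as the compiler's FieldOff.
    fmt.Println("offset of fn:", unsafe.Offsetof(r.fn))
}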
diff --git a/src/cmd/compile/internal/staticdata/data.go b/src/cmd/compile/internal/staticdata/data.go
index abb0bba646..f25d8d8ec5 100644
--- a/src/cmd/compile/internal/staticdata/data.go
+++ b/src/cmd/compile/internal/staticdata/data.go
@@ -92,6 +92,10 @@ func StringSym(pos src.XPos, s string) (data *obj.LSym) {
return symdata
}
+// maxFileSize is the maximum file size permitted by the linker
+// (see issue #9862).
+const maxFileSize = int64(2e9)
+
// fileStringSym returns a symbol for the contents and the size of file.
// If readonly is true, the symbol shares storage with any literal string
// or other file with the same content and is placed in a read-only section.
@@ -133,12 +137,12 @@ func fileStringSym(pos src.XPos, file string, readonly bool, hash []byte) (*obj.
}
return sym, size, nil
}
- if size > 2e9 {
+ if size > maxFileSize {
// ggloblsym takes an int32,
// and probably the rest of the toolchain
// can't handle such big symbols either.
// See golang.org/issue/9862.
- return nil, 0, fmt.Errorf("file too large")
+ return nil, 0, fmt.Errorf("file too large (%d bytes > %d bytes)", size, maxFileSize)
}
// File is too big to read and keep in memory.
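For reference, the change above gives the 2e9 limit a name and reports the offending size in the error message. A minimal sketch of the same kind of size guard (assumed standalone helper, not the compiler's actual code path):

package main

import (
    "fmt"
    "os"
)

const maxFileSize = int64(2e9) // mirrors the limit named above

func checkSize(path string) error {
    info, err := os.Stat(path)
    if err != nil {
        return err
    }
    if size := info.Size(); size > maxFileSize {
        return fmt.Errorf("file too large (%d bytes > %d bytes)", size, maxFileSize)
    }
    return nil
}

func main() {
    if err := checkSize("main.go"); err != nil {
        fmt.Println(err)
    }
}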
diff --git a/src/cmd/compile/internal/staticdata/embed.go b/src/cmd/compile/internal/staticdata/embed.go
index 8936c4f5b4..0730d346b2 100644
--- a/src/cmd/compile/internal/staticdata/embed.go
+++ b/src/cmd/compile/internal/staticdata/embed.go
@@ -108,13 +108,6 @@ func WriteEmbed(v *ir.Name) {
// TODO(mdempsky): User errors should be reported by the frontend.
commentPos := (*v.Embed)[0].Pos
- if !types.AllowsGoVersion(types.LocalPkg, 1, 16) {
- prevPos := base.Pos
- base.Pos = commentPos
- base.ErrorfVers("go1.16", "go:embed")
- base.Pos = prevPos
- return
- }
if base.Flag.Cfg.Embed.Patterns == nil {
base.ErrorfAt(commentPos, "invalid go:embed: build system did not supply embed configuration")
return
diff --git a/src/cmd/compile/internal/staticinit/sched.go b/src/cmd/compile/internal/staticinit/sched.go
index 0c97b6de74..9329a46989 100644
--- a/src/cmd/compile/internal/staticinit/sched.go
+++ b/src/cmd/compile/internal/staticinit/sched.go
@@ -403,10 +403,10 @@ func (s *Schedule) initplan(n ir.Node) {
base.Fatalf("initplan structlit")
}
a := a.(*ir.StructKeyExpr)
- if a.Field.IsBlank() {
+ if a.Sym().IsBlank() {
continue
}
- s.addvalue(p, a.Offset, a.Value)
+ s.addvalue(p, a.Field.Offset, a.Value)
}
case ir.OMAPLIT:
diff --git a/src/cmd/compile/internal/syntax/parser.go b/src/cmd/compile/internal/syntax/parser.go
index e7b8840b33..c477ddd45d 100644
--- a/src/cmd/compile/internal/syntax/parser.go
+++ b/src/cmd/compile/internal/syntax/parser.go
@@ -146,11 +146,13 @@ func (p *parser) updateBase(pos Pos, tline, tcol uint, text string) {
// If we have a column (//line filename:line:col form),
// an empty filename means to use the previous filename.
filename := text[:i-1] // lop off ":line"
+ trimmed := false
if filename == "" && ok2 {
filename = p.base.Filename()
+ trimmed = p.base.Trimmed()
}
- p.base = NewLineBase(pos, filename, line, col)
+ p.base = NewLineBase(pos, filename, trimmed, line, col)
}
func commentText(s string) string {
@@ -604,7 +606,7 @@ func (p *parser) typeDecl(group *Group) Decl {
} else {
// x is the array length expression
if debug && x == nil {
- panic("internal error: nil expression")
+ panic("length expression is nil")
}
d.Type = p.arrayType(pos, x)
}
@@ -1049,7 +1051,16 @@ loop:
}
// x[i:...
- p.want(_Colon)
+ // For a better error message, don't simply use p.want(_Colon) here (issue #47704).
+ if !p.got(_Colon) {
+ if p.mode&AllowGenerics == 0 {
+ p.syntaxError("expecting : or ]")
+ p.advance(_Colon, _Rbrack)
+ } else {
+ p.syntaxError("expecting comma, : or ]")
+ p.advance(_Comma, _Colon, _Rbrack)
+ }
+ }
p.xnest++
t := new(SliceExpr)
t.pos = pos
@@ -1100,7 +1111,7 @@ loop:
complit_ok = true
}
case *IndexExpr:
- if p.xnest >= 0 {
+ if p.xnest >= 0 && !isValue(t) {
// x is possibly a composite literal type
complit_ok = true
}
@@ -1127,6 +1138,21 @@ loop:
return x
}
+// isValue reports whether x syntactically must be a value (and not a type) expression.
+func isValue(x Expr) bool {
+ switch x := x.(type) {
+ case *BasicLit, *CompositeLit, *FuncLit, *SliceExpr, *AssertExpr, *TypeSwitchGuard, *CallExpr:
+ return true
+ case *Operation:
+ return x.Op != Mul || x.Y != nil // *T may be a type
+ case *ParenExpr:
+ return isValue(x.X)
+ case *IndexExpr:
+ return isValue(x.X) || isValue(x.Index)
+ }
+ return false
+}
+
// Element = Expression | LiteralValue .
func (p *parser) bare_complitexpr() Expr {
if trace {
@@ -1443,6 +1469,18 @@ func (p *parser) interfaceType() *InterfaceType {
}
return false
}
+
+ default:
+ if p.mode&AllowGenerics != 0 {
+ pos := p.pos()
+ if t := p.typeOrNil(); t != nil {
+ f := new(Field)
+ f.pos = pos
+ f.Type = t
+ typ.MethodList = append(typ.MethodList, p.embeddedElem(f))
+ return false
+ }
+ }
}
if p.mode&AllowGenerics != 0 {
diff --git a/src/cmd/compile/internal/syntax/pos.go b/src/cmd/compile/internal/syntax/pos.go
index baebcc995c..1494c0989f 100644
--- a/src/cmd/compile/internal/syntax/pos.go
+++ b/src/cmd/compile/internal/syntax/pos.go
@@ -133,13 +133,19 @@ type PosBase struct {
pos Pos
filename string
line, col uint32
+ trimmed bool // whether -trimpath has been applied
}
// NewFileBase returns a new PosBase for the given filename.
// A file PosBase's position is relative to itself, with the
// position being filename:1:1.
func NewFileBase(filename string) *PosBase {
- base := &PosBase{MakePos(nil, linebase, colbase), filename, linebase, colbase}
+ return NewTrimmedFileBase(filename, false)
+}
+
+// NewTrimmedFileBase is like NewFileBase, but allows specifying Trimmed.
+func NewTrimmedFileBase(filename string, trimmed bool) *PosBase {
+ base := &PosBase{MakePos(nil, linebase, colbase), filename, linebase, colbase, trimmed}
base.pos.base = base
return base
}
@@ -149,8 +155,8 @@ func NewFileBase(filename string) *PosBase {
// the comment containing the line directive. For a directive in a line comment,
// that position is the beginning of the next line (i.e., the newline character
// belongs to the line comment).
-func NewLineBase(pos Pos, filename string, line, col uint) *PosBase {
- return &PosBase{pos, filename, sat32(line), sat32(col)}
+func NewLineBase(pos Pos, filename string, trimmed bool, line, col uint) *PosBase {
+ return &PosBase{pos, filename, sat32(line), sat32(col), trimmed}
}
func (base *PosBase) IsFileBase() bool {
@@ -188,6 +194,13 @@ func (base *PosBase) Col() uint {
return uint(base.col)
}
+func (base *PosBase) Trimmed() bool {
+ if base == nil {
+ return false
+ }
+ return base.trimmed
+}
+
func sat32(x uint) uint32 {
if x > PosMax {
return PosMax
diff --git a/src/cmd/compile/internal/syntax/positions.go b/src/cmd/compile/internal/syntax/positions.go
index b00f86c67c..93596559a0 100644
--- a/src/cmd/compile/internal/syntax/positions.go
+++ b/src/cmd/compile/internal/syntax/positions.go
@@ -12,7 +12,7 @@ func StartPos(n Node) Pos {
for m := n; ; {
switch n := m.(type) {
case nil:
- panic("internal error: nil")
+ panic("nil node")
// packages
case *File:
@@ -124,7 +124,7 @@ func EndPos(n Node) Pos {
for m := n; ; {
switch n := m.(type) {
case nil:
- panic("internal error: nil")
+ panic("nil node")
// packages
case *File:
diff --git a/src/cmd/compile/internal/syntax/testdata/go2/linalg.go2 b/src/cmd/compile/internal/syntax/testdata/go2/linalg.go2
index 0d27603a58..822d0287e7 100644
--- a/src/cmd/compile/internal/syntax/testdata/go2/linalg.go2
+++ b/src/cmd/compile/internal/syntax/testdata/go2/linalg.go2
@@ -9,10 +9,10 @@ import "math"
// Numeric is type bound that matches any numeric type.
// It would likely be in a constraints package in the standard library.
type Numeric interface {
- type int, int8, int16, int32, int64,
- uint, uint8, uint16, uint32, uint64, uintptr,
- float32, float64,
- complex64, complex128
+ ~int | ~int8 | ~int16 | ~int32 | ~int64 |
+ uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr |
+ float32 | ~float64 |
+ complex64 | ~complex128
}
func DotProduct[T Numeric](s1, s2 []T) T {
@@ -42,14 +42,14 @@ func AbsDifference[T NumericAbs[T]](a, b T) T {
// OrderedNumeric is a type bound that matches numeric types that support the < operator.
type OrderedNumeric interface {
- type int, int8, int16, int32, int64,
- uint, uint8, uint16, uint32, uint64, uintptr,
- float32, float64
+ ~int | ~int8 | ~int16 | ~int32 | ~int64 |
+ uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr |
+ float32 | ~float64
}
// Complex is a type bound that matches the two complex types, which do not have a < operator.
type Complex interface {
- type complex64, complex128
+ ~complex64 | ~complex128
}
// OrderedAbs is a helper type that defines an Abs method for
diff --git a/src/cmd/compile/internal/syntax/testdata/go2/smoketest.go2 b/src/cmd/compile/internal/syntax/testdata/go2/smoketest.go2
index e5cfba0612..42efb42527 100644
--- a/src/cmd/compile/internal/syntax/testdata/go2/smoketest.go2
+++ b/src/cmd/compile/internal/syntax/testdata/go2/smoketest.go2
@@ -46,12 +46,12 @@ type _ struct{ T[int] }
// interfaces
type _ interface{
m()
- type int
+ ~int
}
type _ interface{
- type int, float, string
- type complex128
+ ~int | ~float | ~string
+ ~complex128
underlying(underlying underlying) underlying
}
diff --git a/src/cmd/compile/internal/syntax/testdata/go2/typeinst2.go2 b/src/cmd/compile/internal/syntax/testdata/go2/typeinst2.go2
index 6e2104a515..f3deb703b6 100644
--- a/src/cmd/compile/internal/syntax/testdata/go2/typeinst2.go2
+++ b/src/cmd/compile/internal/syntax/testdata/go2/typeinst2.go2
@@ -175,12 +175,12 @@ type _ interface {
// Interface type lists can contain any type, incl. *Named types.
// Verify that we use the underlying type to compute the operational type.
type MyInt int
-func add1[T interface{type MyInt}](x T) T {
+func add1[T interface{ ~MyInt }](x T) T {
return x + 1
}
type MyString string
-func double[T interface{type MyInt, MyString}](x T) T {
+func double[T interface{ ~MyInt | ~MyString }](x T) T {
return x + x
}
@@ -189,15 +189,15 @@ func double[T interface{type MyInt, MyString}](x T) T {
// type lists.
type E0 interface {
- type int, bool, string
+ ~int | ~bool | ~string
}
type E1 interface {
- type int, float64, string
+ ~int | ~float64 | ~string
}
type E2 interface {
- type float64
+ ~float64
}
type I0 interface {
diff --git a/src/cmd/compile/internal/syntax/testdata/go2/typeparams.go2 b/src/cmd/compile/internal/syntax/testdata/go2/typeparams.go2
index f78037f0f5..111f7c1004 100644
--- a/src/cmd/compile/internal/syntax/testdata/go2/typeparams.go2
+++ b/src/cmd/compile/internal/syntax/testdata/go2/typeparams.go2
@@ -48,22 +48,22 @@ func swapswap[A, B any](a A, b B) (A, B) {
type F[A, B any] func(A, B) (B, A)
-func min[T interface{ type int }](x, y T) T {
+func min[T interface{ ~int }](x, y T) T {
if x < y {
return x
}
return y
}
-func _[T interface{type int, float32}](x, y T) bool { return x < y }
+func _[T interface{ ~int | ~float32 }](x, y T) bool { return x < y }
func _[T any](x, y T) bool { return x /* ERROR cannot compare */ < y }
-func _[T interface{type int, float32, bool}](x, y T) bool { return x /* ERROR cannot compare */ < y }
+func _[T interface{ ~int | ~float32 | ~bool }](x, y T) bool { return x /* ERROR cannot compare */ < y }
func _[T C1[T]](x, y T) bool { return x /* ERROR cannot compare */ < y }
func _[T C2[T]](x, y T) bool { return x < y }
type C1[T any] interface{}
-type C2[T any] interface{ type int, float32 }
+type C2[T any] interface{ ~int | ~float32 }
func new[T any]() *T {
var x T
@@ -91,40 +91,40 @@ var _ = f3[int, rune, bool](1, struct{x rune}{}, nil)
// indexing
func _[T any] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
-func _[T interface{ type int }] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
-func _[T interface{ type string }] (x T, i int) { _ = x[i] }
-func _[T interface{ type []int }] (x T, i int) { _ = x[i] }
-func _[T interface{ type [10]int, *[20]int, map[string]int }] (x T, i int) { _ = x[i] }
-func _[T interface{ type string, []byte }] (x T, i int) { _ = x[i] }
-func _[T interface{ type []int, [1]rune }] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
-func _[T interface{ type string, []rune }] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
+func _[T interface{ ~int }] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
+func _[T interface{ ~string }] (x T, i int) { _ = x[i] }
+func _[T interface{ ~[]int }] (x T, i int) { _ = x[i] }
+func _[T interface{ ~[10]int | ~*[20]int | ~map[string]int }] (x T, i int) { _ = x[i] }
+func _[T interface{ ~string | ~[]byte }] (x T, i int) { _ = x[i] }
+func _[T interface{ ~[]int | ~[1]rune }] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
+func _[T interface{ ~string | ~[]rune }] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
// slicing
// TODO(gri) implement this
-func _[T interface{ type string }] (x T, i, j, k int) { _ = x /* ERROR invalid operation */ [i:j:k] }
+func _[T interface{ ~string }] (x T, i, j, k int) { _ = x /* ERROR invalid operation */ [i:j:k] }
// len/cap built-ins
func _[T any](x T) { _ = len(x /* ERROR invalid argument */ ) }
-func _[T interface{ type int }](x T) { _ = len(x /* ERROR invalid argument */ ) }
-func _[T interface{ type string, []byte, int }](x T) { _ = len(x /* ERROR invalid argument */ ) }
-func _[T interface{ type string }](x T) { _ = len(x) }
-func _[T interface{ type [10]int }](x T) { _ = len(x) }
-func _[T interface{ type []byte }](x T) { _ = len(x) }
-func _[T interface{ type map[int]int }](x T) { _ = len(x) }
-func _[T interface{ type chan int }](x T) { _ = len(x) }
-func _[T interface{ type string, []byte, chan int }](x T) { _ = len(x) }
+func _[T interface{ ~int }](x T) { _ = len(x /* ERROR invalid argument */ ) }
+func _[T interface{ ~string | ~[]byte | ~int }](x T) { _ = len(x /* ERROR invalid argument */ ) }
+func _[T interface{ ~string }](x T) { _ = len(x) }
+func _[T interface{ ~[10]int }](x T) { _ = len(x) }
+func _[T interface{ ~[]byte }](x T) { _ = len(x) }
+func _[T interface{ ~map[int]int }](x T) { _ = len(x) }
+func _[T interface{ ~chan int }](x T) { _ = len(x) }
+func _[T interface{ ~string | ~[]byte | ~chan int }](x T) { _ = len(x) }
func _[T any](x T) { _ = cap(x /* ERROR invalid argument */ ) }
-func _[T interface{ type int }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
-func _[T interface{ type string, []byte, int }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
-func _[T interface{ type string }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
-func _[T interface{ type [10]int }](x T) { _ = cap(x) }
-func _[T interface{ type []byte }](x T) { _ = cap(x) }
-func _[T interface{ type map[int]int }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
-func _[T interface{ type chan int }](x T) { _ = cap(x) }
-func _[T interface{ type []byte, chan int }](x T) { _ = cap(x) }
+func _[T interface{ ~int }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
+func _[T interface{ ~string | ~[]byte | ~int }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
+func _[T interface{ ~string }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
+func _[T interface{ ~[10]int }](x T) { _ = cap(x) }
+func _[T interface{ ~[]byte }](x T) { _ = cap(x) }
+func _[T interface{ ~map[int]int }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
+func _[T interface{ ~chan int }](x T) { _ = cap(x) }
+func _[T interface{ ~[]byte | ~chan int }](x T) { _ = cap(x) }
// range iteration
@@ -132,7 +132,7 @@ func _[T interface{}](x T) {
for range x /* ERROR cannot range */ {}
}
-func _[T interface{ type string, []string }](x T) {
+func _[T interface{ ~string | ~[]string }](x T) {
for range x {}
for i := range x { _ = i }
for i, _ := range x { _ = i }
@@ -144,23 +144,23 @@ func _[T interface{ type string, []string }](x T) {
}
-func _[T interface{ type string, []rune, map[int]rune }](x T) {
+func _[T interface{ ~string | ~[]rune | ~map[int]rune }](x T) {
for _, e := range x { _ = e }
for i, e := range x { _ = i; _ = e }
}
-func _[T interface{ type string, []rune, map[string]rune }](x T) {
+func _[T interface{ ~string | ~[]rune | ~map[string]rune }](x T) {
for _, e := range x { _ = e }
for i, e := range x /* ERROR must have the same key type */ { _ = e }
}
-func _[T interface{ type string, chan int }](x T) {
+func _[T interface{ ~string | ~chan int }](x T) {
for range x {}
for i := range x { _ = i }
for i, _ := range x { _ = i } // TODO(gri) should get an error here: channels only return one value
}
-func _[T interface{ type string, chan<-int }](x T) {
+func _[T interface{ ~string | ~chan<-int }](x T) {
for i := range x /* ERROR send-only channel */ { _ = i }
}
@@ -388,7 +388,7 @@ func _[T any](x T) {
}
}
-func _[T interface{type int}](x T) {
+func _[T interface{ ~int }](x T) {
_ = x /* ERROR not an interface */ .(int)
switch x /* ERROR not an interface */ .(type) {
}
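For reference, these testdata files are converted from the old type-list form (type int, float64, ...) to the union and tilde constraint syntax that ships in Go 1.18. A small compilable example of the new form (requires a Go 1.18+ toolchain; names are illustrative):

package main

import "fmt"

// Number is satisfied by any type whose underlying type is listed below.
type Number interface {
    ~int | ~int64 | ~float64
}

func Sum[T Number](xs []T) T {
    var s T
    for _, x := range xs {
        s += x
    }
    return s
}

func main() {
    fmt.Println(Sum([]int{1, 2, 3}))      // 6
    fmt.Println(Sum([]float64{1.5, 2.5})) // 4
}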
diff --git a/src/cmd/compile/internal/syntax/testdata/interface.go2 b/src/cmd/compile/internal/syntax/testdata/interface.go2
index a817327a43..b399d75148 100644
--- a/src/cmd/compile/internal/syntax/testdata/interface.go2
+++ b/src/cmd/compile/internal/syntax/testdata/interface.go2
@@ -25,7 +25,6 @@ type _ interface {
~int | ~string
}
-
type _ interface {
m()
~int
@@ -34,3 +33,48 @@ type _ interface {
~int | ~string
type bool, int, float64
}
+
+type _ interface {
+ int
+ []byte
+ [10]int
+ struct{}
+ *int
+ func()
+ interface{}
+ map[string]int
+ chan T
+ chan<- T
+ <-chan T
+ T[int]
+}
+
+type _ interface {
+ int | string
+ []byte | string
+ [10]int | string
+ struct{} | string
+ *int | string
+ func() | string
+ interface{} | string
+ map[string]int | string
+ chan T | string
+ chan<- T | string
+ <-chan T | string
+ T[int] | string
+}
+
+type _ interface {
+ ~int | string
+ ~[]byte | string
+ ~[10]int | string
+ ~struct{} | string
+ ~*int | string
+ ~func() | string
+ ~interface{} | string
+ ~map[string]int | string
+ ~chan T | string
+ ~chan<- T | string
+ ~<-chan T | string
+ ~T[int] | string
+}
diff --git a/src/cmd/gofmt/gofmt_typeparams_test.go b/src/cmd/compile/internal/syntax/testdata/issue46558.src
index 10641a77cb..a22b600825 100644
--- a/src/cmd/gofmt/gofmt_typeparams_test.go
+++ b/src/cmd/compile/internal/syntax/testdata/issue46558.src
@@ -2,11 +2,13 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//go:build typeparams
-// +build typeparams
+package p
-package main
-
-func init() {
- typeParamsEnabled = true
-}
+func F(s string) {
+ switch s[0] {
+ case 'a':
+ case s[2] { // ERROR unexpected {
+ case 'b':
+ }
+ }
+} // ERROR non-declaration statement
diff --git a/src/cmd/compile/internal/syntax/testdata/issue47704.go2 b/src/cmd/compile/internal/syntax/testdata/issue47704.go2
new file mode 100644
index 0000000000..4e65857f3b
--- /dev/null
+++ b/src/cmd/compile/internal/syntax/testdata/issue47704.go2
@@ -0,0 +1,18 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package p
+
+// error messages for parser in generic mode
+func _() {
+ _ = m[] // ERROR expecting operand
+ _ = m[x,]
+ _ = m[x /* ERROR unexpected a */ a b c d]
+}
+
+// test case from the issue
+func f(m map[int]int) int {
+ return m[0 // ERROR expecting comma, \: or \]
+ ]
+}
diff --git a/src/cmd/compile/internal/syntax/testdata/issue47704.src b/src/cmd/compile/internal/syntax/testdata/issue47704.src
new file mode 100644
index 0000000000..0156af7d8d
--- /dev/null
+++ b/src/cmd/compile/internal/syntax/testdata/issue47704.src
@@ -0,0 +1,18 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package p
+
+// error messages for parser in non-generic mode
+func _() {
+ _ = m[] // ERROR expecting operand
+ _ = m[x,] // ERROR unexpected comma, expecting \: or \]
+ _ = m[x /* ERROR unexpected a */ a b c d]
+}
+
+// test case from the issue
+func f(m map[int]int) int {
+ return m[0 // ERROR expecting \: or \]
+ ]
+}
diff --git a/src/cmd/compile/internal/syntax/walk.go b/src/cmd/compile/internal/syntax/walk.go
index c26e97a0d8..b025844204 100644
--- a/src/cmd/compile/internal/syntax/walk.go
+++ b/src/cmd/compile/internal/syntax/walk.go
@@ -8,31 +8,73 @@ package syntax
import "fmt"
-// Walk traverses a syntax in pre-order: It starts by calling f(root);
-// root must not be nil. If f returns false (== "continue"), Walk calls
+// Inspect traverses an AST in pre-order: It starts by calling
+// f(node); node must not be nil. If f returns true, Inspect invokes f
+// recursively for each of the non-nil children of node, followed by a
+// call of f(nil).
+//
+// See Walk for caveats about shared nodes.
+func Inspect(root Node, f func(Node) bool) {
+ Walk(root, inspector(f))
+}
+
+type inspector func(Node) bool
+
+func (v inspector) Visit(node Node) Visitor {
+ if v(node) {
+ return v
+ }
+ return nil
+}
+
+// Crawl traverses a syntax in pre-order: It starts by calling f(root);
+// root must not be nil. If f returns false (== "continue"), Crawl calls
// f recursively for each of the non-nil children of that node; if f
-// returns true (== "stop"), Walk does not traverse the respective node's
+// returns true (== "stop"), Crawl does not traverse the respective node's
// children.
+//
+// See Walk for caveats about shared nodes.
+//
+// Deprecated: Use Inspect instead.
+func Crawl(root Node, f func(Node) bool) {
+ Inspect(root, func(node Node) bool {
+ return node != nil && !f(node)
+ })
+}
+
+// Walk traverses an AST in pre-order: It starts by calling
+// v.Visit(node); node must not be nil. If the visitor w returned by
+// v.Visit(node) is not nil, Walk is invoked recursively with visitor
+// w for each of the non-nil children of node, followed by a call of
+// w.Visit(nil).
+//
// Some nodes may be shared among multiple parent nodes (e.g., types in
// field lists such as type T in "a, b, c T"). Such shared nodes are
// walked multiple times.
// TODO(gri) Revisit this design. It may make sense to walk those nodes
// only once. A place where this matters is types2.TestResolveIdents.
-func Walk(root Node, f func(Node) bool) {
- w := walker{f}
- w.node(root)
+func Walk(root Node, v Visitor) {
+ walker{v}.node(root)
+}
+
+// A Visitor's Visit method is invoked for each node encountered by Walk.
+// If the result visitor w is not nil, Walk visits each of the children
+// of node with the visitor w, followed by a call of w.Visit(nil).
+type Visitor interface {
+ Visit(node Node) (w Visitor)
}
type walker struct {
- f func(Node) bool
+ v Visitor
}
-func (w *walker) node(n Node) {
+func (w walker) node(n Node) {
if n == nil {
- panic("invalid syntax tree: nil node")
+ panic("nil node")
}
- if w.f(n) {
+ w.v = w.v.Visit(n)
+ if w.v == nil {
return
}
@@ -285,33 +327,35 @@ func (w *walker) node(n Node) {
default:
panic(fmt.Sprintf("internal error: unknown node type %T", n))
}
+
+ w.v.Visit(nil)
}
-func (w *walker) declList(list []Decl) {
+func (w walker) declList(list []Decl) {
for _, n := range list {
w.node(n)
}
}
-func (w *walker) exprList(list []Expr) {
+func (w walker) exprList(list []Expr) {
for _, n := range list {
w.node(n)
}
}
-func (w *walker) stmtList(list []Stmt) {
+func (w walker) stmtList(list []Stmt) {
for _, n := range list {
w.node(n)
}
}
-func (w *walker) nameList(list []*Name) {
+func (w walker) nameList(list []*Name) {
for _, n := range list {
w.node(n)
}
}
-func (w *walker) fieldList(list []*Field) {
+func (w walker) fieldList(list []*Field) {
for _, n := range list {
w.node(n)
}
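For reference, walk.go now mirrors the Visitor/Inspect API long exported by go/ast: Visit returns the visitor to use for a node's children, and a nil result stops the descent. Since cmd/compile/internal/syntax cannot be imported outside the compiler, the equivalent pattern against go/ast looks like this (illustrative usage):

package main

import (
    "fmt"
    "go/ast"
    "go/parser"
    "go/token"
)

func main() {
    const src = "package p\nfunc add(a, b int) int { return a + b }\n"
    fset := token.NewFileSet()
    f, err := parser.ParseFile(fset, "x.go", src, 0)
    if err != nil {
        panic(err)
    }
    // Pre-order traversal: returning true descends into the node's children.
    ast.Inspect(f, func(n ast.Node) bool {
        if fn, ok := n.(*ast.FuncDecl); ok {
            fmt.Println("func:", fn.Name.Name)
        }
        return true
    })
}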
diff --git a/src/cmd/compile/internal/test/abiutils_test.go b/src/cmd/compile/internal/test/abiutils_test.go
index b752c48612..839546bcb8 100644
--- a/src/cmd/compile/internal/test/abiutils_test.go
+++ b/src/cmd/compile/internal/test/abiutils_test.go
@@ -33,6 +33,8 @@ func TestMain(m *testing.M) {
base.Ctxt.DiagFunc = base.Errorf
base.Ctxt.DiagFlush = base.FlushErrors
base.Ctxt.Bso = bufio.NewWriter(os.Stdout)
+ types.LocalPkg = types.NewPkg("", "local")
+ types.LocalPkg.Prefix = `""`
types.PtrSize = ssagen.Arch.LinkArch.PtrSize
types.RegSize = ssagen.Arch.LinkArch.RegSize
typecheck.InitUniverse()
@@ -309,8 +311,8 @@ func TestABIUtilsInterfaces(t *testing.T) {
ei := types.Types[types.TINTER] // interface{}
pei := types.NewPtr(ei) // *interface{}
fldt := mkFuncType(types.FakeRecvType(), []*types.Type{},
- []*types.Type{types.UntypedString})
- field := types.NewField(src.NoXPos, nil, fldt)
+ []*types.Type{types.Types[types.TSTRING]})
+ field := types.NewField(src.NoXPos, typecheck.Lookup("F"), fldt)
nei := types.NewInterface(types.LocalPkg, []*types.Field{field})
i16 := types.Types[types.TINT16]
tb := types.Types[types.TBOOL]
@@ -322,12 +324,12 @@ func TestABIUtilsInterfaces(t *testing.T) {
IN 0: R{ I0 I1 I2 } spilloffset: 0 typ: struct { int16; int16; bool }
IN 1: R{ I3 I4 } spilloffset: 8 typ: interface {}
IN 2: R{ I5 I6 } spilloffset: 24 typ: interface {}
- IN 3: R{ I7 I8 } spilloffset: 40 typ: interface { () untyped string }
+ IN 3: R{ I7 I8 } spilloffset: 40 typ: interface { F() string }
IN 4: R{ } offset: 0 typ: *interface {}
- IN 5: R{ } offset: 8 typ: interface { () untyped string }
+ IN 5: R{ } offset: 8 typ: interface { F() string }
IN 6: R{ } offset: 24 typ: int16
OUT 0: R{ I0 I1 } spilloffset: -1 typ: interface {}
- OUT 1: R{ I2 I3 } spilloffset: -1 typ: interface { () untyped string }
+ OUT 1: R{ I2 I3 } spilloffset: -1 typ: interface { F() string }
OUT 2: R{ I4 } spilloffset: -1 typ: *interface {}
offsetToSpillArea: 32 spillAreaSize: 56
`)
diff --git a/src/cmd/compile/internal/test/inl_test.go b/src/cmd/compile/internal/test/inl_test.go
index 6f100033cf..bbdbe0c37c 100644
--- a/src/cmd/compile/internal/test/inl_test.go
+++ b/src/cmd/compile/internal/test/inl_test.go
@@ -42,13 +42,10 @@ func TestIntendedInlining(t *testing.T) {
"bucketMask",
"bucketShift",
"chanbuf",
- "deferArgs",
- "deferclass",
"evacuated",
"fastlog2",
"fastrand",
"float64bits",
- "funcPC",
"getArgInfoFast",
"getm",
"getMCache",
@@ -65,7 +62,6 @@ func TestIntendedInlining(t *testing.T) {
"subtract1",
"subtractb",
"tophash",
- "totaldefersize",
"(*bmap).keys",
"(*bmap).overflow",
"(*waitq).enqueue",
diff --git a/src/cmd/compile/internal/typecheck/bexport.go b/src/cmd/compile/internal/typecheck/bexport.go
index 4a84bb13fa..cc7f91f937 100644
--- a/src/cmd/compile/internal/typecheck/bexport.go
+++ b/src/cmd/compile/internal/typecheck/bexport.go
@@ -96,6 +96,9 @@ func predeclared() []*types.Type {
// any type, for builtin export data
types.Types[types.TANY],
+
+ // comparable
+ types.ComparableType,
}
}
return predecl
diff --git a/src/cmd/compile/internal/typecheck/builtin.go b/src/cmd/compile/internal/typecheck/builtin.go
index 833b17b414..3f177d9173 100644
--- a/src/cmd/compile/internal/typecheck/builtin.go
+++ b/src/cmd/compile/internal/typecheck/builtin.go
@@ -71,137 +71,135 @@ var runtimeDecls = [...]struct {
{"slicecopy", funcTag, 54},
{"decoderune", funcTag, 55},
{"countrunes", funcTag, 56},
- {"convI2I", funcTag, 57},
- {"convT16", funcTag, 59},
- {"convT32", funcTag, 61},
- {"convT64", funcTag, 62},
- {"convTstring", funcTag, 63},
- {"convTslice", funcTag, 66},
- {"convT2E", funcTag, 67},
- {"convT2Enoptr", funcTag, 67},
- {"convT2I", funcTag, 67},
- {"convT2Inoptr", funcTag, 67},
- {"assertE2I", funcTag, 68},
- {"assertE2I2", funcTag, 57},
- {"assertI2I", funcTag, 68},
- {"assertI2I2", funcTag, 57},
- {"panicdottypeE", funcTag, 69},
- {"panicdottypeI", funcTag, 69},
- {"panicnildottype", funcTag, 70},
- {"ifaceeq", funcTag, 72},
- {"efaceeq", funcTag, 72},
- {"fastrand", funcTag, 73},
- {"makemap64", funcTag, 75},
- {"makemap", funcTag, 76},
- {"makemap_small", funcTag, 77},
- {"mapaccess1", funcTag, 78},
- {"mapaccess1_fast32", funcTag, 79},
- {"mapaccess1_fast64", funcTag, 80},
- {"mapaccess1_faststr", funcTag, 81},
- {"mapaccess1_fat", funcTag, 82},
- {"mapaccess2", funcTag, 83},
- {"mapaccess2_fast32", funcTag, 84},
- {"mapaccess2_fast64", funcTag, 85},
- {"mapaccess2_faststr", funcTag, 86},
- {"mapaccess2_fat", funcTag, 87},
- {"mapassign", funcTag, 78},
- {"mapassign_fast32", funcTag, 79},
- {"mapassign_fast32ptr", funcTag, 88},
- {"mapassign_fast64", funcTag, 80},
- {"mapassign_fast64ptr", funcTag, 88},
- {"mapassign_faststr", funcTag, 81},
- {"mapiterinit", funcTag, 89},
- {"mapdelete", funcTag, 89},
- {"mapdelete_fast32", funcTag, 90},
- {"mapdelete_fast64", funcTag, 91},
- {"mapdelete_faststr", funcTag, 92},
- {"mapiternext", funcTag, 93},
- {"mapclear", funcTag, 94},
- {"makechan64", funcTag, 96},
- {"makechan", funcTag, 97},
- {"chanrecv1", funcTag, 99},
- {"chanrecv2", funcTag, 100},
- {"chansend1", funcTag, 102},
+ {"convI2I", funcTag, 58},
+ {"convT", funcTag, 59},
+ {"convTnoptr", funcTag, 59},
+ {"convT16", funcTag, 61},
+ {"convT32", funcTag, 63},
+ {"convT64", funcTag, 64},
+ {"convTstring", funcTag, 65},
+ {"convTslice", funcTag, 68},
+ {"assertE2I", funcTag, 69},
+ {"assertE2I2", funcTag, 70},
+ {"assertI2I", funcTag, 69},
+ {"assertI2I2", funcTag, 70},
+ {"panicdottypeE", funcTag, 71},
+ {"panicdottypeI", funcTag, 71},
+ {"panicnildottype", funcTag, 72},
+ {"ifaceeq", funcTag, 73},
+ {"efaceeq", funcTag, 73},
+ {"fastrand", funcTag, 74},
+ {"makemap64", funcTag, 76},
+ {"makemap", funcTag, 77},
+ {"makemap_small", funcTag, 78},
+ {"mapaccess1", funcTag, 79},
+ {"mapaccess1_fast32", funcTag, 80},
+ {"mapaccess1_fast64", funcTag, 81},
+ {"mapaccess1_faststr", funcTag, 82},
+ {"mapaccess1_fat", funcTag, 83},
+ {"mapaccess2", funcTag, 84},
+ {"mapaccess2_fast32", funcTag, 85},
+ {"mapaccess2_fast64", funcTag, 86},
+ {"mapaccess2_faststr", funcTag, 87},
+ {"mapaccess2_fat", funcTag, 88},
+ {"mapassign", funcTag, 79},
+ {"mapassign_fast32", funcTag, 80},
+ {"mapassign_fast32ptr", funcTag, 89},
+ {"mapassign_fast64", funcTag, 81},
+ {"mapassign_fast64ptr", funcTag, 89},
+ {"mapassign_faststr", funcTag, 82},
+ {"mapiterinit", funcTag, 90},
+ {"mapdelete", funcTag, 90},
+ {"mapdelete_fast32", funcTag, 91},
+ {"mapdelete_fast64", funcTag, 92},
+ {"mapdelete_faststr", funcTag, 93},
+ {"mapiternext", funcTag, 94},
+ {"mapclear", funcTag, 95},
+ {"makechan64", funcTag, 97},
+ {"makechan", funcTag, 98},
+ {"chanrecv1", funcTag, 100},
+ {"chanrecv2", funcTag, 101},
+ {"chansend1", funcTag, 103},
{"closechan", funcTag, 30},
- {"writeBarrier", varTag, 104},
- {"typedmemmove", funcTag, 105},
- {"typedmemclr", funcTag, 106},
- {"typedslicecopy", funcTag, 107},
- {"selectnbsend", funcTag, 108},
- {"selectnbrecv", funcTag, 109},
- {"selectsetpc", funcTag, 110},
- {"selectgo", funcTag, 111},
+ {"writeBarrier", varTag, 105},
+ {"typedmemmove", funcTag, 106},
+ {"typedmemclr", funcTag, 107},
+ {"typedslicecopy", funcTag, 108},
+ {"selectnbsend", funcTag, 109},
+ {"selectnbrecv", funcTag, 110},
+ {"selectsetpc", funcTag, 111},
+ {"selectgo", funcTag, 112},
{"block", funcTag, 9},
- {"makeslice", funcTag, 112},
- {"makeslice64", funcTag, 113},
- {"makeslicecopy", funcTag, 114},
- {"growslice", funcTag, 116},
- {"unsafeslice", funcTag, 117},
- {"unsafeslice64", funcTag, 118},
- {"unsafeslicecheckptr", funcTag, 118},
- {"memmove", funcTag, 119},
- {"memclrNoHeapPointers", funcTag, 120},
- {"memclrHasPointers", funcTag, 120},
- {"memequal", funcTag, 121},
- {"memequal0", funcTag, 122},
- {"memequal8", funcTag, 122},
- {"memequal16", funcTag, 122},
- {"memequal32", funcTag, 122},
- {"memequal64", funcTag, 122},
- {"memequal128", funcTag, 122},
- {"f32equal", funcTag, 123},
- {"f64equal", funcTag, 123},
- {"c64equal", funcTag, 123},
- {"c128equal", funcTag, 123},
- {"strequal", funcTag, 123},
- {"interequal", funcTag, 123},
- {"nilinterequal", funcTag, 123},
- {"memhash", funcTag, 124},
- {"memhash0", funcTag, 125},
- {"memhash8", funcTag, 125},
- {"memhash16", funcTag, 125},
- {"memhash32", funcTag, 125},
- {"memhash64", funcTag, 125},
- {"memhash128", funcTag, 125},
- {"f32hash", funcTag, 125},
- {"f64hash", funcTag, 125},
- {"c64hash", funcTag, 125},
- {"c128hash", funcTag, 125},
- {"strhash", funcTag, 125},
- {"interhash", funcTag, 125},
- {"nilinterhash", funcTag, 125},
- {"int64div", funcTag, 126},
- {"uint64div", funcTag, 127},
- {"int64mod", funcTag, 126},
- {"uint64mod", funcTag, 127},
- {"float64toint64", funcTag, 128},
- {"float64touint64", funcTag, 129},
- {"float64touint32", funcTag, 130},
- {"int64tofloat64", funcTag, 131},
- {"uint64tofloat64", funcTag, 132},
- {"uint32tofloat64", funcTag, 133},
- {"complex128div", funcTag, 134},
- {"getcallerpc", funcTag, 135},
- {"getcallersp", funcTag, 135},
+ {"makeslice", funcTag, 113},
+ {"makeslice64", funcTag, 114},
+ {"makeslicecopy", funcTag, 115},
+ {"growslice", funcTag, 117},
+ {"unsafeslice", funcTag, 118},
+ {"unsafeslice64", funcTag, 119},
+ {"unsafeslicecheckptr", funcTag, 119},
+ {"memmove", funcTag, 120},
+ {"memclrNoHeapPointers", funcTag, 121},
+ {"memclrHasPointers", funcTag, 121},
+ {"memequal", funcTag, 122},
+ {"memequal0", funcTag, 123},
+ {"memequal8", funcTag, 123},
+ {"memequal16", funcTag, 123},
+ {"memequal32", funcTag, 123},
+ {"memequal64", funcTag, 123},
+ {"memequal128", funcTag, 123},
+ {"f32equal", funcTag, 124},
+ {"f64equal", funcTag, 124},
+ {"c64equal", funcTag, 124},
+ {"c128equal", funcTag, 124},
+ {"strequal", funcTag, 124},
+ {"interequal", funcTag, 124},
+ {"nilinterequal", funcTag, 124},
+ {"memhash", funcTag, 125},
+ {"memhash0", funcTag, 126},
+ {"memhash8", funcTag, 126},
+ {"memhash16", funcTag, 126},
+ {"memhash32", funcTag, 126},
+ {"memhash64", funcTag, 126},
+ {"memhash128", funcTag, 126},
+ {"f32hash", funcTag, 126},
+ {"f64hash", funcTag, 126},
+ {"c64hash", funcTag, 126},
+ {"c128hash", funcTag, 126},
+ {"strhash", funcTag, 126},
+ {"interhash", funcTag, 126},
+ {"nilinterhash", funcTag, 126},
+ {"int64div", funcTag, 127},
+ {"uint64div", funcTag, 128},
+ {"int64mod", funcTag, 127},
+ {"uint64mod", funcTag, 128},
+ {"float64toint64", funcTag, 129},
+ {"float64touint64", funcTag, 130},
+ {"float64touint32", funcTag, 131},
+ {"int64tofloat64", funcTag, 132},
+ {"uint64tofloat64", funcTag, 133},
+ {"uint32tofloat64", funcTag, 134},
+ {"complex128div", funcTag, 135},
+ {"getcallerpc", funcTag, 136},
+ {"getcallersp", funcTag, 136},
{"racefuncenter", funcTag, 31},
{"racefuncexit", funcTag, 9},
{"raceread", funcTag, 31},
{"racewrite", funcTag, 31},
- {"racereadrange", funcTag, 136},
- {"racewriterange", funcTag, 136},
- {"msanread", funcTag, 136},
- {"msanwrite", funcTag, 136},
- {"msanmove", funcTag, 137},
- {"checkptrAlignment", funcTag, 138},
- {"checkptrArithmetic", funcTag, 140},
- {"libfuzzerTraceCmp1", funcTag, 141},
- {"libfuzzerTraceCmp2", funcTag, 142},
- {"libfuzzerTraceCmp4", funcTag, 143},
- {"libfuzzerTraceCmp8", funcTag, 144},
- {"libfuzzerTraceConstCmp1", funcTag, 141},
- {"libfuzzerTraceConstCmp2", funcTag, 142},
- {"libfuzzerTraceConstCmp4", funcTag, 143},
- {"libfuzzerTraceConstCmp8", funcTag, 144},
+ {"racereadrange", funcTag, 137},
+ {"racewriterange", funcTag, 137},
+ {"msanread", funcTag, 137},
+ {"msanwrite", funcTag, 137},
+ {"msanmove", funcTag, 138},
+ {"checkptrAlignment", funcTag, 139},
+ {"checkptrArithmetic", funcTag, 141},
+ {"libfuzzerTraceCmp1", funcTag, 142},
+ {"libfuzzerTraceCmp2", funcTag, 143},
+ {"libfuzzerTraceCmp4", funcTag, 144},
+ {"libfuzzerTraceCmp8", funcTag, 145},
+ {"libfuzzerTraceConstCmp1", funcTag, 142},
+ {"libfuzzerTraceConstCmp2", funcTag, 143},
+ {"libfuzzerTraceConstCmp4", funcTag, 144},
+ {"libfuzzerTraceConstCmp8", funcTag, 145},
{"x86HasPOPCNT", varTag, 6},
{"x86HasSSE41", varTag, 6},
{"x86HasFMA", varTag, 6},
@@ -224,7 +222,7 @@ func params(tlist ...*types.Type) []*types.Field {
}
func runtimeTypes() []*types.Type {
- var typs [145]*types.Type
+ var typs [146]*types.Type
typs[0] = types.ByteType
typs[1] = types.NewPtr(typs[0])
typs[2] = types.Types[types.TANY]
@@ -282,93 +280,94 @@ func runtimeTypes() []*types.Type {
typs[54] = newSig(params(typs[3], typs[15], typs[3], typs[15], typs[5]), params(typs[15]))
typs[55] = newSig(params(typs[28], typs[15]), params(typs[46], typs[15]))
typs[56] = newSig(params(typs[28]), params(typs[15]))
- typs[57] = newSig(params(typs[1], typs[2]), params(typs[2]))
- typs[58] = types.Types[types.TUINT16]
- typs[59] = newSig(params(typs[58]), params(typs[7]))
- typs[60] = types.Types[types.TUINT32]
+ typs[57] = types.NewPtr(typs[5])
+ typs[58] = newSig(params(typs[1], typs[57]), params(typs[57]))
+ typs[59] = newSig(params(typs[1], typs[3]), params(typs[7]))
+ typs[60] = types.Types[types.TUINT16]
typs[61] = newSig(params(typs[60]), params(typs[7]))
- typs[62] = newSig(params(typs[24]), params(typs[7]))
- typs[63] = newSig(params(typs[28]), params(typs[7]))
- typs[64] = types.Types[types.TUINT8]
- typs[65] = types.NewSlice(typs[64])
- typs[66] = newSig(params(typs[65]), params(typs[7]))
- typs[67] = newSig(params(typs[1], typs[3]), params(typs[2]))
- typs[68] = newSig(params(typs[1], typs[1]), params(typs[1]))
- typs[69] = newSig(params(typs[1], typs[1], typs[1]), nil)
- typs[70] = newSig(params(typs[1]), nil)
- typs[71] = types.NewPtr(typs[5])
- typs[72] = newSig(params(typs[71], typs[7], typs[7]), params(typs[6]))
- typs[73] = newSig(nil, params(typs[60]))
- typs[74] = types.NewMap(typs[2], typs[2])
- typs[75] = newSig(params(typs[1], typs[22], typs[3]), params(typs[74]))
- typs[76] = newSig(params(typs[1], typs[15], typs[3]), params(typs[74]))
- typs[77] = newSig(nil, params(typs[74]))
- typs[78] = newSig(params(typs[1], typs[74], typs[3]), params(typs[3]))
- typs[79] = newSig(params(typs[1], typs[74], typs[60]), params(typs[3]))
- typs[80] = newSig(params(typs[1], typs[74], typs[24]), params(typs[3]))
- typs[81] = newSig(params(typs[1], typs[74], typs[28]), params(typs[3]))
- typs[82] = newSig(params(typs[1], typs[74], typs[3], typs[1]), params(typs[3]))
- typs[83] = newSig(params(typs[1], typs[74], typs[3]), params(typs[3], typs[6]))
- typs[84] = newSig(params(typs[1], typs[74], typs[60]), params(typs[3], typs[6]))
- typs[85] = newSig(params(typs[1], typs[74], typs[24]), params(typs[3], typs[6]))
- typs[86] = newSig(params(typs[1], typs[74], typs[28]), params(typs[3], typs[6]))
- typs[87] = newSig(params(typs[1], typs[74], typs[3], typs[1]), params(typs[3], typs[6]))
- typs[88] = newSig(params(typs[1], typs[74], typs[7]), params(typs[3]))
- typs[89] = newSig(params(typs[1], typs[74], typs[3]), nil)
- typs[90] = newSig(params(typs[1], typs[74], typs[60]), nil)
- typs[91] = newSig(params(typs[1], typs[74], typs[24]), nil)
- typs[92] = newSig(params(typs[1], typs[74], typs[28]), nil)
- typs[93] = newSig(params(typs[3]), nil)
- typs[94] = newSig(params(typs[1], typs[74]), nil)
- typs[95] = types.NewChan(typs[2], types.Cboth)
- typs[96] = newSig(params(typs[1], typs[22]), params(typs[95]))
- typs[97] = newSig(params(typs[1], typs[15]), params(typs[95]))
- typs[98] = types.NewChan(typs[2], types.Crecv)
- typs[99] = newSig(params(typs[98], typs[3]), nil)
- typs[100] = newSig(params(typs[98], typs[3]), params(typs[6]))
- typs[101] = types.NewChan(typs[2], types.Csend)
- typs[102] = newSig(params(typs[101], typs[3]), nil)
- typs[103] = types.NewArray(typs[0], 3)
- typs[104] = types.NewStruct(types.NoPkg, []*types.Field{types.NewField(src.NoXPos, Lookup("enabled"), typs[6]), types.NewField(src.NoXPos, Lookup("pad"), typs[103]), types.NewField(src.NoXPos, Lookup("needed"), typs[6]), types.NewField(src.NoXPos, Lookup("cgo"), typs[6]), types.NewField(src.NoXPos, Lookup("alignme"), typs[24])})
- typs[105] = newSig(params(typs[1], typs[3], typs[3]), nil)
- typs[106] = newSig(params(typs[1], typs[3]), nil)
- typs[107] = newSig(params(typs[1], typs[3], typs[15], typs[3], typs[15]), params(typs[15]))
- typs[108] = newSig(params(typs[101], typs[3]), params(typs[6]))
- typs[109] = newSig(params(typs[3], typs[98]), params(typs[6], typs[6]))
- typs[110] = newSig(params(typs[71]), nil)
- typs[111] = newSig(params(typs[1], typs[1], typs[71], typs[15], typs[15], typs[6]), params(typs[15], typs[6]))
- typs[112] = newSig(params(typs[1], typs[15], typs[15]), params(typs[7]))
- typs[113] = newSig(params(typs[1], typs[22], typs[22]), params(typs[7]))
- typs[114] = newSig(params(typs[1], typs[15], typs[15], typs[7]), params(typs[7]))
- typs[115] = types.NewSlice(typs[2])
- typs[116] = newSig(params(typs[1], typs[115], typs[15]), params(typs[115]))
- typs[117] = newSig(params(typs[1], typs[7], typs[15]), nil)
- typs[118] = newSig(params(typs[1], typs[7], typs[22]), nil)
- typs[119] = newSig(params(typs[3], typs[3], typs[5]), nil)
- typs[120] = newSig(params(typs[7], typs[5]), nil)
- typs[121] = newSig(params(typs[3], typs[3], typs[5]), params(typs[6]))
- typs[122] = newSig(params(typs[3], typs[3]), params(typs[6]))
- typs[123] = newSig(params(typs[7], typs[7]), params(typs[6]))
- typs[124] = newSig(params(typs[7], typs[5], typs[5]), params(typs[5]))
- typs[125] = newSig(params(typs[7], typs[5]), params(typs[5]))
- typs[126] = newSig(params(typs[22], typs[22]), params(typs[22]))
- typs[127] = newSig(params(typs[24], typs[24]), params(typs[24]))
- typs[128] = newSig(params(typs[20]), params(typs[22]))
- typs[129] = newSig(params(typs[20]), params(typs[24]))
- typs[130] = newSig(params(typs[20]), params(typs[60]))
- typs[131] = newSig(params(typs[22]), params(typs[20]))
- typs[132] = newSig(params(typs[24]), params(typs[20]))
- typs[133] = newSig(params(typs[60]), params(typs[20]))
- typs[134] = newSig(params(typs[26], typs[26]), params(typs[26]))
- typs[135] = newSig(nil, params(typs[5]))
- typs[136] = newSig(params(typs[5], typs[5]), nil)
- typs[137] = newSig(params(typs[5], typs[5], typs[5]), nil)
- typs[138] = newSig(params(typs[7], typs[1], typs[5]), nil)
- typs[139] = types.NewSlice(typs[7])
- typs[140] = newSig(params(typs[7], typs[139]), nil)
- typs[141] = newSig(params(typs[64], typs[64]), nil)
- typs[142] = newSig(params(typs[58], typs[58]), nil)
+ typs[62] = types.Types[types.TUINT32]
+ typs[63] = newSig(params(typs[62]), params(typs[7]))
+ typs[64] = newSig(params(typs[24]), params(typs[7]))
+ typs[65] = newSig(params(typs[28]), params(typs[7]))
+ typs[66] = types.Types[types.TUINT8]
+ typs[67] = types.NewSlice(typs[66])
+ typs[68] = newSig(params(typs[67]), params(typs[7]))
+ typs[69] = newSig(params(typs[1], typs[1]), params(typs[1]))
+ typs[70] = newSig(params(typs[1], typs[2]), params(typs[2]))
+ typs[71] = newSig(params(typs[1], typs[1], typs[1]), nil)
+ typs[72] = newSig(params(typs[1]), nil)
+ typs[73] = newSig(params(typs[57], typs[7], typs[7]), params(typs[6]))
+ typs[74] = newSig(nil, params(typs[62]))
+ typs[75] = types.NewMap(typs[2], typs[2])
+ typs[76] = newSig(params(typs[1], typs[22], typs[3]), params(typs[75]))
+ typs[77] = newSig(params(typs[1], typs[15], typs[3]), params(typs[75]))
+ typs[78] = newSig(nil, params(typs[75]))
+ typs[79] = newSig(params(typs[1], typs[75], typs[3]), params(typs[3]))
+ typs[80] = newSig(params(typs[1], typs[75], typs[62]), params(typs[3]))
+ typs[81] = newSig(params(typs[1], typs[75], typs[24]), params(typs[3]))
+ typs[82] = newSig(params(typs[1], typs[75], typs[28]), params(typs[3]))
+ typs[83] = newSig(params(typs[1], typs[75], typs[3], typs[1]), params(typs[3]))
+ typs[84] = newSig(params(typs[1], typs[75], typs[3]), params(typs[3], typs[6]))
+ typs[85] = newSig(params(typs[1], typs[75], typs[62]), params(typs[3], typs[6]))
+ typs[86] = newSig(params(typs[1], typs[75], typs[24]), params(typs[3], typs[6]))
+ typs[87] = newSig(params(typs[1], typs[75], typs[28]), params(typs[3], typs[6]))
+ typs[88] = newSig(params(typs[1], typs[75], typs[3], typs[1]), params(typs[3], typs[6]))
+ typs[89] = newSig(params(typs[1], typs[75], typs[7]), params(typs[3]))
+ typs[90] = newSig(params(typs[1], typs[75], typs[3]), nil)
+ typs[91] = newSig(params(typs[1], typs[75], typs[62]), nil)
+ typs[92] = newSig(params(typs[1], typs[75], typs[24]), nil)
+ typs[93] = newSig(params(typs[1], typs[75], typs[28]), nil)
+ typs[94] = newSig(params(typs[3]), nil)
+ typs[95] = newSig(params(typs[1], typs[75]), nil)
+ typs[96] = types.NewChan(typs[2], types.Cboth)
+ typs[97] = newSig(params(typs[1], typs[22]), params(typs[96]))
+ typs[98] = newSig(params(typs[1], typs[15]), params(typs[96]))
+ typs[99] = types.NewChan(typs[2], types.Crecv)
+ typs[100] = newSig(params(typs[99], typs[3]), nil)
+ typs[101] = newSig(params(typs[99], typs[3]), params(typs[6]))
+ typs[102] = types.NewChan(typs[2], types.Csend)
+ typs[103] = newSig(params(typs[102], typs[3]), nil)
+ typs[104] = types.NewArray(typs[0], 3)
+ typs[105] = types.NewStruct(types.NoPkg, []*types.Field{types.NewField(src.NoXPos, Lookup("enabled"), typs[6]), types.NewField(src.NoXPos, Lookup("pad"), typs[104]), types.NewField(src.NoXPos, Lookup("needed"), typs[6]), types.NewField(src.NoXPos, Lookup("cgo"), typs[6]), types.NewField(src.NoXPos, Lookup("alignme"), typs[24])})
+ typs[106] = newSig(params(typs[1], typs[3], typs[3]), nil)
+ typs[107] = newSig(params(typs[1], typs[3]), nil)
+ typs[108] = newSig(params(typs[1], typs[3], typs[15], typs[3], typs[15]), params(typs[15]))
+ typs[109] = newSig(params(typs[102], typs[3]), params(typs[6]))
+ typs[110] = newSig(params(typs[3], typs[99]), params(typs[6], typs[6]))
+ typs[111] = newSig(params(typs[57]), nil)
+ typs[112] = newSig(params(typs[1], typs[1], typs[57], typs[15], typs[15], typs[6]), params(typs[15], typs[6]))
+ typs[113] = newSig(params(typs[1], typs[15], typs[15]), params(typs[7]))
+ typs[114] = newSig(params(typs[1], typs[22], typs[22]), params(typs[7]))
+ typs[115] = newSig(params(typs[1], typs[15], typs[15], typs[7]), params(typs[7]))
+ typs[116] = types.NewSlice(typs[2])
+ typs[117] = newSig(params(typs[1], typs[116], typs[15]), params(typs[116]))
+ typs[118] = newSig(params(typs[1], typs[7], typs[15]), nil)
+ typs[119] = newSig(params(typs[1], typs[7], typs[22]), nil)
+ typs[120] = newSig(params(typs[3], typs[3], typs[5]), nil)
+ typs[121] = newSig(params(typs[7], typs[5]), nil)
+ typs[122] = newSig(params(typs[3], typs[3], typs[5]), params(typs[6]))
+ typs[123] = newSig(params(typs[3], typs[3]), params(typs[6]))
+ typs[124] = newSig(params(typs[7], typs[7]), params(typs[6]))
+ typs[125] = newSig(params(typs[7], typs[5], typs[5]), params(typs[5]))
+ typs[126] = newSig(params(typs[7], typs[5]), params(typs[5]))
+ typs[127] = newSig(params(typs[22], typs[22]), params(typs[22]))
+ typs[128] = newSig(params(typs[24], typs[24]), params(typs[24]))
+ typs[129] = newSig(params(typs[20]), params(typs[22]))
+ typs[130] = newSig(params(typs[20]), params(typs[24]))
+ typs[131] = newSig(params(typs[20]), params(typs[62]))
+ typs[132] = newSig(params(typs[22]), params(typs[20]))
+ typs[133] = newSig(params(typs[24]), params(typs[20]))
+ typs[134] = newSig(params(typs[62]), params(typs[20]))
+ typs[135] = newSig(params(typs[26], typs[26]), params(typs[26]))
+ typs[136] = newSig(nil, params(typs[5]))
+ typs[137] = newSig(params(typs[5], typs[5]), nil)
+ typs[138] = newSig(params(typs[5], typs[5], typs[5]), nil)
+ typs[139] = newSig(params(typs[7], typs[1], typs[5]), nil)
+ typs[140] = types.NewSlice(typs[7])
+ typs[141] = newSig(params(typs[7], typs[140]), nil)
+ typs[142] = newSig(params(typs[66], typs[66]), nil)
typs[143] = newSig(params(typs[60], typs[60]), nil)
- typs[144] = newSig(params(typs[24], typs[24]), nil)
+ typs[144] = newSig(params(typs[62], typs[62]), nil)
+ typs[145] = newSig(params(typs[24], typs[24]), nil)
return typs[:]
}
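The table above is regenerated (by mkbuiltin.go) from the declarations in builtin/runtime.go shown next: each runtimeDecls entry pairs a runtime symbol with an index into the type slice built by runtimeTypes, so adding convT/convTnoptr and the new *uintptr signature for convI2I shifts every later index. A minimal standalone sketch of that decl/type-table pairing, with toy names rather than the compiler's API:

package main

import "fmt"

// decl mirrors the shape of a runtimeDecls entry: a name plus an index
// into a separately built table of type descriptions.
type decl struct {
    name string
    typ  int
}

// typesTable plays the role of runtimeTypes: it builds the indexed slice
// that decl.typ refers into. Inserting a new entry shifts later indices.
func typesTable() []string {
    return []string{
        "func(*byte, *uintptr) *uintptr",   // e.g. convI2I's new signature
        "func(*byte, *any) unsafe.Pointer", // e.g. convT / convTnoptr
    }
}

func main() {
    decls := []decl{
        {"convI2I", 0},
        {"convT", 1},
        {"convTnoptr", 1}, // two decls may share one signature index
    }
    typs := typesTable()
    for _, d := range decls {
        fmt.Printf("%-10s %s\n", d.name, typs[d.typ])
    }
}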
diff --git a/src/cmd/compile/internal/typecheck/builtin/runtime.go b/src/cmd/compile/internal/typecheck/builtin/runtime.go
index 2b29ea3c08..605b904288 100644
--- a/src/cmd/compile/internal/typecheck/builtin/runtime.go
+++ b/src/cmd/compile/internal/typecheck/builtin/runtime.go
@@ -84,10 +84,15 @@ func decoderune(string, int) (retv rune, retk int)
func countrunes(string) int
// Non-empty-interface to non-empty-interface conversion.
-func convI2I(typ *byte, elem any) (ret any)
+func convI2I(typ *byte, itab *uintptr) (ret *uintptr)
-// Specialized type-to-interface conversion.
-// These return only a data pointer.
+// Convert a non-interface type to the data word of an (empty or nonempty) interface.
+func convT(typ *byte, elem *any) unsafe.Pointer
+
+// Same as convT, for types with no pointers in them.
+func convTnoptr(typ *byte, elem *any) unsafe.Pointer
+
+// Specialized versions of convT for specific types.
// These functions take concrete types in the runtime. But they may
// be used for a wider range of types, which have the same memory
// layout as the parameter type. The compiler converts the
@@ -99,14 +104,6 @@ func convT64(val uint64) unsafe.Pointer
func convTstring(val string) unsafe.Pointer
func convTslice(val []uint8) unsafe.Pointer
-// Type to empty-interface conversion.
-func convT2E(typ *byte, elem *any) (ret any)
-func convT2Enoptr(typ *byte, elem *any) (ret any)
-
-// Type to non-empty-interface conversion.
-func convT2I(tab *byte, elem *any) (ret any)
-func convT2Inoptr(tab *byte, elem *any) (ret any)
-
// interface type assertions x.(T)
func assertE2I(inter *byte, typ *byte) *byte
func assertE2I2(inter *byte, eface any) (ret any)
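With this change the old convT2E/convT2I family is gone: convT and convTnoptr return only the data word, and the compiler supplies the type or itab word itself (convI2I likewise now traffics in itab pointers rather than full interface values). A small illustration of the kind of source that ends up calling these helpers; the exact lowering depends on escape analysis and the concrete type:

package main

import "fmt"

// pair has no pointer fields, so converting it to an interface is the
// convTnoptr flavor of boxing: only the data word needs to be allocated,
// and the compiler wires up the type word statically.
type pair struct{ x, y int }

func box(p pair) interface{} {
    return p // typically lowered to a convT/convTnoptr call plus a static type word
}

func main() {
    fmt.Println(box(pair{1, 2}))
}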
diff --git a/src/cmd/compile/internal/typecheck/const.go b/src/cmd/compile/internal/typecheck/const.go
index 761b043794..c27cf0e646 100644
--- a/src/cmd/compile/internal/typecheck/const.go
+++ b/src/cmd/compile/internal/typecheck/const.go
@@ -881,7 +881,9 @@ func evalunsafe(n ir.Node) int64 {
case ir.OOFFSETOF:
// must be a selector.
n := n.(*ir.UnaryExpr)
- if n.X.Op() != ir.OXDOT {
+ // ODOT and ODOTPTR are allowed in case the OXDOT transformation has
+ // already happened (e.g. during -G=3 stenciling).
+ if n.X.Op() != ir.OXDOT && n.X.Op() != ir.ODOT && n.X.Op() != ir.ODOTPTR {
base.Errorf("invalid expression %v", n)
return 0
}
@@ -901,7 +903,7 @@ func evalunsafe(n ir.Node) int64 {
switch tsel.Op() {
case ir.ODOT, ir.ODOTPTR:
break
- case ir.OCALLPART:
+ case ir.OMETHVALUE:
base.Errorf("invalid expression %v: argument is a method value", n)
return 0
default:
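The relaxed check matters for generics: after -G=3 stenciling, the selector inside unsafe.Offsetof may already be an ODOT/ODOTPTR rather than an unresolved OXDOT. Roughly the kind of source that exercises this path (type and field names are illustrative):

package main

import (
    "fmt"
    "unsafe"
)

type header struct {
    tag  uint32
    size uint64
}

// offsetOfSize is generic only to force instantiation (stenciling); by the
// time the instantiated body is checked, h.size may already be a resolved
// selector, which the relaxed check above now accepts.
func offsetOfSize[T any](h header, _ T) uintptr {
    return unsafe.Offsetof(h.size)
}

func main() {
    fmt.Println(offsetOfSize(header{}, 0)) // 8 on typical 64-bit targets
}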
diff --git a/src/cmd/compile/internal/typecheck/crawler.go b/src/cmd/compile/internal/typecheck/crawler.go
new file mode 100644
index 0000000000..9a348b9f37
--- /dev/null
+++ b/src/cmd/compile/internal/typecheck/crawler.go
@@ -0,0 +1,231 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typecheck
+
+import (
+ "cmd/compile/internal/base"
+ "cmd/compile/internal/ir"
+ "cmd/compile/internal/types"
+)
+
+// crawlExports crawls the type/object graph rooted at the given list
+// of exported objects. Any functions that are found to be potentially
+// callable by importers are marked with ExportInline so that
+// iexport.go knows to re-export their inline body.
+func crawlExports(exports []*ir.Name) {
+ p := crawler{
+ marked: make(map[*types.Type]bool),
+ embedded: make(map[*types.Type]bool),
+ }
+ for _, n := range exports {
+ p.markObject(n)
+ }
+}
+
+type crawler struct {
+ marked map[*types.Type]bool // types already seen by markType
+ embedded map[*types.Type]bool // types already seen by markEmbed
+}
+
+// markObject visits a reachable object.
+func (p *crawler) markObject(n *ir.Name) {
+ if n.Op() == ir.ONAME && n.Class == ir.PFUNC {
+ p.markInlBody(n)
+ }
+
+ // If a declared type name is reachable, users can embed it in their
+ // own types, which makes even its unexported methods reachable.
+ if n.Op() == ir.OTYPE {
+ p.markEmbed(n.Type())
+ }
+
+ p.markType(n.Type())
+}
+
+// markType recursively visits types reachable from t to identify
+// functions whose inline bodies may be needed.
+func (p *crawler) markType(t *types.Type) {
+ if t.IsInstantiatedGeneric() {
+ // Re-instantiated types don't add anything new, so don't follow them.
+ return
+ }
+ if p.marked[t] {
+ return
+ }
+ p.marked[t] = true
+
+ // If this is a defined type, mark all of its associated
+ // methods. Skip interface types because t.Methods contains
+ // only their unexpanded method set (i.e., exclusive of
+ // interface embeddings), and the switch statement below
+ // handles their full method set.
+ if t.Sym() != nil && t.Kind() != types.TINTER {
+ for _, m := range t.Methods().Slice() {
+ if types.IsExported(m.Sym.Name) {
+ p.markObject(m.Nname.(*ir.Name))
+ }
+ }
+ }
+
+ // Recursively mark any types that can be produced given a
+ // value of type t: dereferencing a pointer; indexing or
+ // iterating over an array, slice, or map; receiving from a
+ // channel; accessing a struct field or interface method; or
+ // calling a function.
+ //
+ // Notably, we don't mark function parameter types, because
+ // the user already needs some way to construct values of
+ // those types.
+ switch t.Kind() {
+ case types.TPTR, types.TARRAY, types.TSLICE:
+ p.markType(t.Elem())
+
+ case types.TCHAN:
+ if t.ChanDir().CanRecv() {
+ p.markType(t.Elem())
+ }
+
+ case types.TMAP:
+ p.markType(t.Key())
+ p.markType(t.Elem())
+
+ case types.TSTRUCT:
+ for _, f := range t.FieldSlice() {
+ if types.IsExported(f.Sym.Name) || f.Embedded != 0 {
+ p.markType(f.Type)
+ }
+ }
+
+ case types.TFUNC:
+ for _, f := range t.Results().FieldSlice() {
+ p.markType(f.Type)
+ }
+
+ case types.TINTER:
+ // TODO(danscales) - will have to deal with the types in interface
+ // elements here when implemented in types2 and represented in types1.
+ for _, f := range t.AllMethods().Slice() {
+ if types.IsExported(f.Sym.Name) {
+ p.markType(f.Type)
+ }
+ }
+
+ case types.TTYPEPARAM:
+ // No other type that needs to be followed.
+ }
+}
+
+// markEmbed is similar to markType, but handles finding methods that
+// need to be re-exported because t can be embedded in user code
+// (possibly transitively).
+func (p *crawler) markEmbed(t *types.Type) {
+ if t.IsPtr() {
+ // Defined pointer type; not allowed to embed anyway.
+ if t.Sym() != nil {
+ return
+ }
+ t = t.Elem()
+ }
+
+ if t.IsInstantiatedGeneric() {
+ // Re-instantiated types don't add anything new, so don't follow them.
+ return
+ }
+
+ if p.embedded[t] {
+ return
+ }
+ p.embedded[t] = true
+
+ // If t is a defined type, then re-export all of its methods. Unlike
+ // in markType, we include even unexported methods here, because we
+ // still need to generate wrappers for them, even if the user can't
+ // refer to them directly.
+ if t.Sym() != nil && t.Kind() != types.TINTER {
+ for _, m := range t.Methods().Slice() {
+ p.markObject(m.Nname.(*ir.Name))
+ }
+ }
+
+ // If t is a struct, recursively visit its embedded fields.
+ if t.IsStruct() {
+ for _, f := range t.FieldSlice() {
+ if f.Embedded != 0 {
+ p.markEmbed(f.Type)
+ }
+ }
+ }
+}
+
+// markInlBody marks n's inline body for export and recursively
+// ensures all called functions are marked too.
+func (p *crawler) markInlBody(n *ir.Name) {
+ if n == nil {
+ return
+ }
+ if n.Op() != ir.ONAME || n.Class != ir.PFUNC {
+ base.Fatalf("markInlBody: unexpected %v, %v, %v", n, n.Op(), n.Class)
+ }
+ fn := n.Func
+ if fn == nil {
+ base.Fatalf("markInlBody: missing Func on %v", n)
+ }
+ if fn.Inl == nil {
+ return
+ }
+
+ if fn.ExportInline() {
+ return
+ }
+ fn.SetExportInline(true)
+
+ ImportedBody(fn)
+
+ var doFlood func(n ir.Node)
+ doFlood = func(n ir.Node) {
+ switch n.Op() {
+ case ir.OMETHEXPR, ir.ODOTMETH:
+ p.markInlBody(ir.MethodExprName(n))
+
+ case ir.ONAME:
+ n := n.(*ir.Name)
+ switch n.Class {
+ case ir.PFUNC:
+ p.markInlBody(n)
+ Export(n)
+ case ir.PEXTERN:
+ Export(n)
+ }
+ p.checkGenericType(n.Type())
+ case ir.OTYPE:
+ p.checkGenericType(n.Type())
+ case ir.OMETHVALUE:
+ // Okay, because we don't yet inline indirect
+ // calls to method values.
+ case ir.OCLOSURE:
+ // VisitList doesn't visit closure bodies, so force a
+ // recursive call to VisitList on the body of the closure.
+ ir.VisitList(n.(*ir.ClosureExpr).Func.Body, doFlood)
+ }
+ }
+
+ // Recursively identify all referenced functions for
+ // reexport. We want to include even non-called functions,
+ // because after inlining they might be callable.
+ ir.VisitList(fn.Inl.Body, doFlood)
+}
+
+// checkGenericType ensures that we call markType() on any base generic type that
+// is written to the export file (even if not explicitly marked
+// for export), so its methods will be available for inlining if needed.
+func (p *crawler) checkGenericType(t *types.Type) {
+ if t != nil && t.HasTParam() {
+ if t.OrigSym != nil {
+ // Convert to the base generic type.
+ t = t.OrigSym.Def.Type()
+ }
+ p.markType(t)
+ }
+}
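The new crawler is a straightforward reachability walk: start from the exported roots, mark each type or function once, and follow only the edges an importer could traverse (element types, exported fields and methods, function results, inline bodies). A stripped-down sketch of that pattern on a toy graph, with a seen-set to cut cycles; the names here are illustrative, not compiler APIs:

package main

import "fmt"

// node stands in for a type or function in the export graph.
type node struct {
    name  string
    edges []*node
}

// crawler mirrors the shape of the export crawler: a marked set plus a
// recursive visit that follows each reachable node exactly once.
type crawler struct {
    marked map[*node]bool
}

func (c *crawler) mark(n *node) {
    if n == nil || c.marked[n] {
        return // already visited
    }
    c.marked[n] = true
    for _, e := range n.edges {
        c.mark(e)
    }
}

func main() {
    a := &node{name: "T"}
    b := &node{name: "T.Method", edges: []*node{a}}
    a.edges = append(a.edges, b) // cycles are fine; the marked set stops them

    c := &crawler{marked: map[*node]bool{}}
    c.mark(a)
    fmt.Println(len(c.marked), "reachable") // 2 reachable
}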
diff --git a/src/cmd/compile/internal/typecheck/dcl.go b/src/cmd/compile/internal/typecheck/dcl.go
index 5b771e3c0b..472d8d2b8a 100644
--- a/src/cmd/compile/internal/typecheck/dcl.go
+++ b/src/cmd/compile/internal/typecheck/dcl.go
@@ -6,7 +6,7 @@ package typecheck
import (
"fmt"
- "strconv"
+ "sync"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
@@ -363,12 +363,10 @@ func funcargs(nt *ir.FuncType) {
}
// declare the out arguments.
- gen := len(nt.Params)
- for _, n := range nt.Results {
+ for i, n := range nt.Results {
if n.Sym == nil {
// Name so that escape analysis can track it. ~r stands for 'result'.
- n.Sym = LookupNum("~r", gen)
- gen++
+ n.Sym = LookupNum("~r", i)
}
if n.Sym.IsBlank() {
// Give it a name so we can assign to it during return. ~b stands for 'blank'.
@@ -377,8 +375,7 @@ func funcargs(nt *ir.FuncType) {
// func g() int
// f is allowed to use a plain 'return' with no arguments, while g is not.
// So the two cases must be distinguished.
- n.Sym = LookupNum("~b", gen)
- gen++
+ n.Sym = LookupNum("~b", i)
}
funcarg(n, ir.PPARAMOUT)
@@ -431,6 +428,7 @@ func TempAt(pos src.XPos, curfn *ir.Func, t *types.Type) *ir.Name {
n := ir.NewNameAt(pos, s)
s.Def = n
n.SetType(t)
+ n.SetTypecheck(1)
n.Class = ir.PAUTO
n.SetEsc(ir.EscNever)
n.Curfn = curfn
@@ -443,20 +441,50 @@ func TempAt(pos src.XPos, curfn *ir.Func, t *types.Type) *ir.Name {
return n
}
+var (
+ autotmpnamesmu sync.Mutex
+ autotmpnames []string
+)
+
// autotmpname returns the name for an autotmp variable numbered n.
func autotmpname(n int) string {
- // Give each tmp a different name so that they can be registerized.
- // Add a preceding . to avoid clashing with legal names.
- const prefix = ".autotmp_"
- // Start with a buffer big enough to hold a large n.
- b := []byte(prefix + " ")[:len(prefix)]
- b = strconv.AppendInt(b, int64(n), 10)
- return types.InternString(b)
+ autotmpnamesmu.Lock()
+ defer autotmpnamesmu.Unlock()
+
+ // Grow autotmpnames, if needed.
+ if n >= len(autotmpnames) {
+ autotmpnames = append(autotmpnames, make([]string, n+1-len(autotmpnames))...)
+ autotmpnames = autotmpnames[:cap(autotmpnames)]
+ }
+
+ s := autotmpnames[n]
+ if s == "" {
+ // Give each tmp a different name so that they can be registerized.
+ // Add a preceding . to avoid clashing with legal names.
+ prefix := ".autotmp_%d"
+
+ // In quirks mode, pad out the number to stabilize variable
+ // sorting. This ensures autotmps 8 and 9 sort the same way even
+ // if they get renumbered to 9 and 10, respectively.
+ if base.Debug.UnifiedQuirks != 0 {
+ prefix = ".autotmp_%06d"
+ }
+
+ s = fmt.Sprintf(prefix, n)
+ autotmpnames[n] = s
+ }
+ return s
}
// f is method type, with receiver.
// return function type, receiver as first argument (or not).
func NewMethodType(sig *types.Type, recv *types.Type) *types.Type {
+ if sig.HasTParam() {
+ base.Fatalf("NewMethodType with type parameters in signature %+v", sig)
+ }
+ if recv != nil && recv.HasTParam() {
+ base.Fatalf("NewMethodType with type parameters in receiver %+v", recv)
+ }
nrecvs := 0
if recv != nil {
nrecvs++
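autotmpname now memoizes generated names in a package-level slice guarded by a mutex, so repeated requests for the same index return the same string and the generator is safe under concurrent compilation. The same pattern in isolation, using a hypothetical tmpName helper rather than the compiler's:

package main

import (
    "fmt"
    "sync"
)

var (
    namesMu sync.Mutex
    names   []string
)

// tmpName returns a cached ".autotmp_N"-style name for index n, growing the
// cache on demand. The mutex makes the cache safe for concurrent callers.
func tmpName(n int) string {
    namesMu.Lock()
    defer namesMu.Unlock()

    if n >= len(names) {
        names = append(names, make([]string, n+1-len(names))...)
    }
    if names[n] == "" {
        names[n] = fmt.Sprintf(".autotmp_%d", n)
    }
    return names[n]
}

func main() {
    fmt.Println(tmpName(3), tmpName(3) == tmpName(3)) // .autotmp_3 true
}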
diff --git a/src/cmd/compile/internal/typecheck/export.go b/src/cmd/compile/internal/typecheck/export.go
index 63d0a1ec6c..30726d4327 100644
--- a/src/cmd/compile/internal/typecheck/export.go
+++ b/src/cmd/compile/internal/typecheck/export.go
@@ -15,22 +15,22 @@ import (
// importalias declares symbol s as an imported type alias with type t.
// ipkg is the package being imported
-func importalias(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
- return importobj(ipkg, pos, s, ir.OTYPE, ir.PEXTERN, t)
+func importalias(pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
+ return importobj(pos, s, ir.OTYPE, ir.PEXTERN, t)
}
// importconst declares symbol s as an imported constant with type t and value val.
// ipkg is the package being imported
-func importconst(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type, val constant.Value) *ir.Name {
- n := importobj(ipkg, pos, s, ir.OLITERAL, ir.PEXTERN, t)
+func importconst(pos src.XPos, s *types.Sym, t *types.Type, val constant.Value) *ir.Name {
+ n := importobj(pos, s, ir.OLITERAL, ir.PEXTERN, t)
n.SetVal(val)
return n
}
// importfunc declares symbol s as an imported function with type t.
// ipkg is the package being imported
-func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
- n := importobj(ipkg, pos, s, ir.ONAME, ir.PFUNC, t)
+func importfunc(pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
+ n := importobj(pos, s, ir.ONAME, ir.PFUNC, t)
n.Func = ir.NewFunc(pos)
n.Func.Nname = n
return n
@@ -38,8 +38,8 @@ func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.
// importobj declares symbol s as an imported object representable by op.
// ipkg is the package being imported
-func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class, t *types.Type) *ir.Name {
- n := importsym(ipkg, pos, s, op, ctxt)
+func importobj(pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class, t *types.Type) *ir.Name {
+ n := importsym(pos, s, op, ctxt)
n.SetType(t)
if ctxt == ir.PFUNC {
n.Sym().SetFunc(true)
@@ -47,7 +47,7 @@ func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Cl
return n
}
-func importsym(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class) *ir.Name {
+func importsym(pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class) *ir.Name {
if n := s.PkgDef(); n != nil {
base.Fatalf("importsym of symbol that already exists: %v", n)
}
@@ -61,14 +61,14 @@ func importsym(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Cl
// importtype returns the named type declared by symbol s.
// If no such type has been declared yet, a forward declaration is returned.
// ipkg is the package being imported
-func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *ir.Name {
- n := importsym(ipkg, pos, s, ir.OTYPE, ir.PEXTERN)
+func importtype(pos src.XPos, s *types.Sym) *ir.Name {
+ n := importsym(pos, s, ir.OTYPE, ir.PEXTERN)
n.SetType(types.NewNamed(n))
return n
}
// importvar declares symbol s as an imported variable with type t.
// ipkg is the package being imported
-func importvar(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
- return importobj(ipkg, pos, s, ir.ONAME, ir.PEXTERN, t)
+func importvar(pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
+ return importobj(pos, s, ir.ONAME, ir.PEXTERN, t)
}
diff --git a/src/cmd/compile/internal/typecheck/expr.go b/src/cmd/compile/internal/typecheck/expr.go
index 24d141e8a2..7e974dfda8 100644
--- a/src/cmd/compile/internal/typecheck/expr.go
+++ b/src/cmd/compile/internal/typecheck/expr.go
@@ -311,14 +311,23 @@ func tcCompLit(n *ir.CompLitExpr) (res ir.Node) {
f := t.Field(i)
s := f.Sym
- if s != nil && !types.IsExported(s.Name) && s.Pkg != types.LocalPkg {
- base.Errorf("implicit assignment of unexported field '%s' in %v literal", s.Name, t)
+
+ // Do the test for assigning to unexported fields.
+ // But if this is an instantiated function, then
+ // the function has already been typechecked. In
+ // that case, don't do the test, since it can fail
+ // for the closure structs created in
+ // walkClosure(), because the instantiated
+ // function is compiled as if in the source
+ // package of the generic function.
+ if !(ir.CurFunc != nil && strings.Index(ir.CurFunc.Nname.Sym().Name, "[") >= 0) {
+ if s != nil && !types.IsExported(s.Name) && s.Pkg != types.LocalPkg {
+ base.Errorf("implicit assignment of unexported field '%s' in %v literal", s.Name, t)
+ }
}
// No pushtype allowed here. Must name fields for that.
n1 = AssignConv(n1, f.Type, "field value")
- sk := ir.NewStructKeyExpr(base.Pos, f.Sym, n1)
- sk.Offset = f.Offset
- ls[i] = sk
+ ls[i] = ir.NewStructKeyExpr(base.Pos, f, n1)
}
if len(ls) < t.NumFields() {
base.Errorf("too few values in %v", n)
@@ -328,77 +337,33 @@ func tcCompLit(n *ir.CompLitExpr) (res ir.Node) {
// keyed list
ls := n.List
- for i, l := range ls {
- ir.SetPos(l)
-
- if l.Op() == ir.OKEY {
- kv := l.(*ir.KeyExpr)
- key := kv.Key
-
- // Sym might have resolved to name in other top-level
- // package, because of import dot. Redirect to correct sym
- // before we do the lookup.
- s := key.Sym()
- if id, ok := key.(*ir.Ident); ok && DotImportRefs[id] != nil {
- s = Lookup(s.Name)
- }
-
- // An OXDOT uses the Sym field to hold
- // the field to the right of the dot,
- // so s will be non-nil, but an OXDOT
- // is never a valid struct literal key.
- if s == nil || s.Pkg != types.LocalPkg || key.Op() == ir.OXDOT || s.IsBlank() {
- base.Errorf("invalid field name %v in struct initializer", key)
- continue
- }
-
- l = ir.NewStructKeyExpr(l.Pos(), s, kv.Value)
- ls[i] = l
- }
-
- if l.Op() != ir.OSTRUCTKEY {
- if !errored {
- base.Errorf("mixture of field:value and value initializers")
- errored = true
- }
- ls[i] = Expr(ls[i])
- continue
- }
- l := l.(*ir.StructKeyExpr)
-
- f := Lookdot1(nil, l.Field, t, t.Fields(), 0)
- if f == nil {
- if ci := Lookdot1(nil, l.Field, t, t.Fields(), 2); ci != nil { // Case-insensitive lookup.
- if visible(ci.Sym) {
- base.Errorf("unknown field '%v' in struct literal of type %v (but does have %v)", l.Field, t, ci.Sym)
- } else if nonexported(l.Field) && l.Field.Name == ci.Sym.Name { // Ensure exactness before the suggestion.
- base.Errorf("cannot refer to unexported field '%v' in struct literal of type %v", l.Field, t)
- } else {
- base.Errorf("unknown field '%v' in struct literal of type %v", l.Field, t)
+ for i, n := range ls {
+ ir.SetPos(n)
+
+ sk, ok := n.(*ir.StructKeyExpr)
+ if !ok {
+ kv, ok := n.(*ir.KeyExpr)
+ if !ok {
+ if !errored {
+ base.Errorf("mixture of field:value and value initializers")
+ errored = true
}
+ ls[i] = Expr(n)
continue
}
- var f *types.Field
- p, _ := dotpath(l.Field, t, &f, true)
- if p == nil || f.IsMethod() {
- base.Errorf("unknown field '%v' in struct literal of type %v", l.Field, t)
+
+ sk = tcStructLitKey(t, kv)
+ if sk == nil {
continue
}
- // dotpath returns the parent embedded types in reverse order.
- var ep []string
- for ei := len(p) - 1; ei >= 0; ei-- {
- ep = append(ep, p[ei].field.Sym.Name)
- }
- ep = append(ep, l.Field.Name)
- base.Errorf("cannot use promoted field %v in struct literal of type %v", strings.Join(ep, "."), t)
- continue
+
+ fielddup(sk.Sym().Name, hash)
}
- fielddup(f.Sym.Name, hash)
- l.Offset = f.Offset
// No pushtype allowed here. Tried and rejected.
- l.Value = Expr(l.Value)
- l.Value = AssignConv(l.Value, f.Type, "field value")
+ sk.Value = Expr(sk.Value)
+ sk.Value = AssignConv(sk.Value, sk.Field.Type, "field value")
+ ls[i] = sk
}
}
@@ -409,6 +374,60 @@ func tcCompLit(n *ir.CompLitExpr) (res ir.Node) {
return n
}
+// tcStructLitKey typechecks an OKEY node that appeared within a
+// struct literal.
+func tcStructLitKey(typ *types.Type, kv *ir.KeyExpr) *ir.StructKeyExpr {
+ key := kv.Key
+
+ // Sym might have resolved to name in other top-level
+ // package, because of import dot. Redirect to correct sym
+ // before we do the lookup.
+ sym := key.Sym()
+ if id, ok := key.(*ir.Ident); ok && DotImportRefs[id] != nil {
+ sym = Lookup(sym.Name)
+ }
+
+ // An OXDOT uses the Sym field to hold
+ // the field to the right of the dot,
+ // so sym will be non-nil, but an OXDOT
+ // is never a valid struct literal key.
+ if sym == nil || sym.Pkg != types.LocalPkg || key.Op() == ir.OXDOT || sym.IsBlank() {
+ base.Errorf("invalid field name %v in struct initializer", key)
+ return nil
+ }
+
+ if f := Lookdot1(nil, sym, typ, typ.Fields(), 0); f != nil {
+ return ir.NewStructKeyExpr(kv.Pos(), f, kv.Value)
+ }
+
+ if ci := Lookdot1(nil, sym, typ, typ.Fields(), 2); ci != nil { // Case-insensitive lookup.
+ if visible(ci.Sym) {
+ base.Errorf("unknown field '%v' in struct literal of type %v (but does have %v)", sym, typ, ci.Sym)
+ } else if nonexported(sym) && sym.Name == ci.Sym.Name { // Ensure exactness before the suggestion.
+ base.Errorf("cannot refer to unexported field '%v' in struct literal of type %v", sym, typ)
+ } else {
+ base.Errorf("unknown field '%v' in struct literal of type %v", sym, typ)
+ }
+ return nil
+ }
+
+ var f *types.Field
+ p, _ := dotpath(sym, typ, &f, true)
+ if p == nil || f.IsMethod() {
+ base.Errorf("unknown field '%v' in struct literal of type %v", sym, typ)
+ return nil
+ }
+
+ // dotpath returns the parent embedded types in reverse order.
+ var ep []string
+ for ei := len(p) - 1; ei >= 0; ei-- {
+ ep = append(ep, p[ei].field.Sym.Name)
+ }
+ ep = append(ep, sym.Name)
+ base.Errorf("cannot use promoted field %v in struct literal of type %v", strings.Join(ep, "."), typ)
+ return nil
+}
+
// tcConv typechecks an OCONV node.
func tcConv(n *ir.ConvExpr) ir.Node {
types.CheckSize(n.Type()) // ensure width is calculated for backend
@@ -522,8 +541,8 @@ func tcDot(n *ir.SelectorExpr, top int) ir.Node {
}
if (n.Op() == ir.ODOTINTER || n.Op() == ir.ODOTMETH) && top&ctxCallee == 0 {
- n.SetOp(ir.OCALLPART)
- n.SetType(MethodValueWrapper(n).Type())
+ n.SetOp(ir.OMETHVALUE)
+ n.SetType(NewMethodType(n.Type(), nil))
}
return n
}
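tcStructLitKey now carries the field resolution for keyed struct literals: a key must resolve to a field declared directly in the struct type, while unknown, unexported (from another package), or promoted fields are rejected with the diagnostics shown above. The comments in this small example paraphrase which literals pass and which the helper rejects:

package main

import "fmt"

type inner struct{ X int }

type outer struct {
    inner // X is promoted from the embedded field
    Named int
}

func main() {
    ok := outer{Named: 1} // keyed literal naming a direct field: accepted
    fmt.Println(ok.Named)

    // _ = outer{X: 1}       // rejected: X is a promoted field of outer
    // _ = outer{Missing: 1} // rejected: unknown field in struct literal
}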
diff --git a/src/cmd/compile/internal/typecheck/func.go b/src/cmd/compile/internal/typecheck/func.go
index fbcc784627..7dec65c1d6 100644
--- a/src/cmd/compile/internal/typecheck/func.go
+++ b/src/cmd/compile/internal/typecheck/func.go
@@ -8,28 +8,29 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
+ "cmd/internal/src"
"fmt"
"go/constant"
"go/token"
)
-// package all the arguments that match a ... T parameter into a []T.
-func MakeDotArgs(typ *types.Type, args []ir.Node) ir.Node {
+// MakeDotArgs packages all the arguments that match a ... T parameter into a []T.
+func MakeDotArgs(pos src.XPos, typ *types.Type, args []ir.Node) ir.Node {
var n ir.Node
if len(args) == 0 {
- n = NodNil()
+ n = ir.NewNilExpr(pos)
n.SetType(typ)
} else {
- lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ), nil)
- lit.List.Append(args...)
+ args = append([]ir.Node(nil), args...)
+ lit := ir.NewCompLitExpr(pos, ir.OCOMPLIT, ir.TypeNode(typ), args)
lit.SetImplicit(true)
n = lit
}
n = Expr(n)
if n.Type() == nil {
- base.Fatalf("mkdotargslice: typecheck failed")
+ base.FatalfAt(pos, "mkdotargslice: typecheck failed")
}
return n
}
@@ -47,7 +48,7 @@ func FixVariadicCall(call *ir.CallExpr) {
args := call.Args
extra := args[vi:]
- slice := MakeDotArgs(vt, extra)
+ slice := MakeDotArgs(call.Pos(), vt, extra)
for i := range extra {
extra[i] = nil // allow GC
}
@@ -56,6 +57,25 @@ func FixVariadicCall(call *ir.CallExpr) {
call.IsDDD = true
}
+// FixMethodCall rewrites a method call t.M(...) into a function call T.M(t, ...).
+func FixMethodCall(call *ir.CallExpr) {
+ if call.X.Op() != ir.ODOTMETH {
+ return
+ }
+
+ dot := call.X.(*ir.SelectorExpr)
+
+ fn := Expr(ir.NewSelectorExpr(dot.Pos(), ir.OXDOT, ir.TypeNode(dot.X.Type()), dot.Selection.Sym))
+
+ args := make([]ir.Node, 1+len(call.Args))
+ args[0] = dot.X
+ copy(args[1:], call.Args)
+
+ call.SetOp(ir.OCALLFUNC)
+ call.X = fn
+ call.Args = args
+}
+
// ClosureType returns the struct type used to hold all the information
// needed in the closure for clo (clo must be a OCLOSURE node).
// The address of a variable of the returned type can be cast to a func.
@@ -73,8 +93,25 @@ func ClosureType(clo *ir.ClosureExpr) *types.Type {
// The information appears in the binary in the form of type descriptors;
// the struct is unnamed so that closures in multiple packages with the
// same struct type can share the descriptor.
+
+ // Make sure the .F field is in the same package as the rest of the
+ // fields. This deals with closures in instantiated functions, which are
+ // compiled as if from the source package of the generic function.
+ var pkg *types.Pkg
+ if len(clo.Func.ClosureVars) == 0 {
+ pkg = types.LocalPkg
+ } else {
+ for _, v := range clo.Func.ClosureVars {
+ if pkg == nil {
+ pkg = v.Sym().Pkg
+ } else if pkg != v.Sym().Pkg {
+ base.Fatalf("Closure variables from multiple packages")
+ }
+ }
+ }
+
fields := []*types.Field{
- types.NewField(base.Pos, Lookup(".F"), types.Types[types.TUINTPTR]),
+ types.NewField(base.Pos, pkg.Lookup(".F"), types.Types[types.TUINTPTR]),
}
for _, v := range clo.Func.ClosureVars {
typ := v.Type()
@@ -88,10 +125,10 @@ func ClosureType(clo *ir.ClosureExpr) *types.Type {
return typ
}
-// PartialCallType returns the struct type used to hold all the information
-// needed in the closure for n (n must be a OCALLPART node).
-// The address of a variable of the returned type can be cast to a func.
-func PartialCallType(n *ir.SelectorExpr) *types.Type {
+// MethodValueType returns the struct type used to hold all the information
+// needed in the closure for a OMETHVALUE node. The address of a variable of
+// the returned type can be cast to a func.
+func MethodValueType(n *ir.SelectorExpr) *types.Type {
t := types.NewStruct(types.NoPkg, []*types.Field{
types.NewField(base.Pos, Lookup("F"), types.Types[types.TUINTPTR]),
types.NewField(base.Pos, Lookup("R"), n.X.Type()),
@@ -181,153 +218,38 @@ func fnpkg(fn *ir.Name) *types.Pkg {
return fn.Sym().Pkg
}
-// ClosureName generates a new unique name for a closure within
-// outerfunc.
-func ClosureName(outerfunc *ir.Func) *types.Sym {
- outer := "glob."
- prefix := "func"
- gen := &globClosgen
-
- if outerfunc != nil {
- if outerfunc.OClosure != nil {
- prefix = ""
- }
-
- outer = ir.FuncName(outerfunc)
-
- // There may be multiple functions named "_". In those
- // cases, we can't use their individual Closgens as it
- // would lead to name clashes.
- if !ir.IsBlank(outerfunc.Nname) {
- gen = &outerfunc.Closgen
- }
- }
-
- *gen++
- return Lookup(fmt.Sprintf("%s.%s%d", outer, prefix, *gen))
-}
-
-// globClosgen is like Func.Closgen, but for the global scope.
-var globClosgen int32
-
-// MethodValueWrapper returns the DCLFUNC node representing the
-// wrapper function (*-fm) needed for the given method value. If the
-// wrapper function hasn't already been created yet, it's created and
-// added to Target.Decls.
-//
-// TODO(mdempsky): Move into walk. This isn't part of type checking.
-func MethodValueWrapper(dot *ir.SelectorExpr) *ir.Func {
- if dot.Op() != ir.OCALLPART {
- base.Fatalf("MethodValueWrapper: unexpected %v (%v)", dot, dot.Op())
- }
-
- t0 := dot.Type()
- meth := dot.Sel
- rcvrtype := dot.X.Type()
- sym := ir.MethodSymSuffix(rcvrtype, meth, "-fm")
-
- if sym.Uniq() {
- return sym.Def.(*ir.Func)
- }
- sym.SetUniq(true)
-
- savecurfn := ir.CurFunc
- saveLineNo := base.Pos
- ir.CurFunc = nil
-
- // Set line number equal to the line number where the method is declared.
- if pos := dot.Selection.Pos; pos.IsKnown() {
- base.Pos = pos
- }
- // Note: !dot.Selection.Pos.IsKnown() happens for method expressions where
- // the method is implicitly declared. The Error method of the
- // built-in error type is one such method. We leave the line
- // number at the use of the method expression in this
- // case. See issue 29389.
-
- tfn := ir.NewFuncType(base.Pos, nil,
- NewFuncParams(t0.Params(), true),
- NewFuncParams(t0.Results(), false))
-
- fn := DeclFunc(sym, tfn)
- fn.SetDupok(true)
- fn.SetNeedctxt(true)
- fn.SetWrapper(true)
-
- // Declare and initialize variable holding receiver.
- ptr := ir.NewNameAt(base.Pos, Lookup(".this"))
- ptr.Class = ir.PAUTOHEAP
- ptr.SetType(rcvrtype)
- ptr.Curfn = fn
- ptr.SetIsClosureVar(true)
- ptr.SetByval(true)
- fn.ClosureVars = append(fn.ClosureVars, ptr)
-
- call := ir.NewCallExpr(base.Pos, ir.OCALL, ir.NewSelectorExpr(base.Pos, ir.OXDOT, ptr, meth), nil)
- call.Args = ir.ParamNames(tfn.Type())
- call.IsDDD = tfn.Type().IsVariadic()
-
- var body ir.Node = call
- if t0.NumResults() != 0 {
- ret := ir.NewReturnStmt(base.Pos, nil)
- ret.Results = []ir.Node{call}
- body = ret
- }
-
- fn.Body = []ir.Node{body}
- FinishFuncBody()
-
- Func(fn)
- // Need to typecheck the body of the just-generated wrapper.
- // typecheckslice() requires that Curfn is set when processing an ORETURN.
- ir.CurFunc = fn
- Stmts(fn.Body)
- sym.Def = fn
- Target.Decls = append(Target.Decls, fn)
- ir.CurFunc = savecurfn
- base.Pos = saveLineNo
-
- return fn
-}
-
// tcClosure typechecks an OCLOSURE node. It also creates the named
// function associated with the closure.
// TODO: This creation of the named function should probably really be done in a
// separate pass from type-checking.
-func tcClosure(clo *ir.ClosureExpr, top int) {
+func tcClosure(clo *ir.ClosureExpr, top int) ir.Node {
fn := clo.Func
+
+ // We used to allow IR builders to typecheck the underlying Func
+ // themselves, but that led to too much variety and inconsistency
+ // around who's responsible for naming the function, typechecking
+ // it, or adding it to Target.Decls.
+ //
+ // It's now all or nothing. Callers are still allowed to do these
+ // themselves, but then they assume responsibility for all of them.
+ if fn.Typecheck() == 1 {
+ base.FatalfAt(fn.Pos(), "underlying closure func already typechecked: %v", fn)
+ }
+
// Set current associated iota value, so iota can be used inside
// function in ConstSpec, see issue #22344
if x := getIotaValue(); x >= 0 {
fn.Iota = x
}
- fn.SetClosureCalled(top&ctxCallee != 0)
-
- // Do not typecheck fn twice, otherwise, we will end up pushing
- // fn to Target.Decls multiple times, causing InitLSym called twice.
- // See #30709
- if fn.Typecheck() == 1 {
- clo.SetType(fn.Type())
- return
- }
-
- // Don't give a name and add to Target.Decls if we are typechecking an inlined
- // body in ImportedBody(), since we only want to create the named function
- // when the closure is actually inlined (and then we force a typecheck
- // explicitly in (*inlsubst).node()).
- if !inTypeCheckInl {
- fn.Nname.SetSym(ClosureName(ir.CurFunc))
- ir.MarkFunc(fn.Nname)
- }
+ ir.NameClosure(clo, ir.CurFunc)
Func(fn)
- clo.SetType(fn.Type())
// Type check the body now, but only if we're inside a function.
// At top level (in a variable initialization: curfn==nil) we're not
// ready to type check code yet; we'll check it later, because the
// underlying closure function we create is added to Target.Decls.
- if ir.CurFunc != nil && clo.Type() != nil {
+ if ir.CurFunc != nil {
oldfn := ir.CurFunc
ir.CurFunc = fn
Stmts(fn.Body)
@@ -353,14 +275,17 @@ func tcClosure(clo *ir.ClosureExpr, top int) {
}
fn.ClosureVars = fn.ClosureVars[:out]
- if base.Flag.W > 1 {
- s := fmt.Sprintf("New closure func: %s", ir.FuncName(fn))
- ir.Dump(s, fn)
- }
- if !inTypeCheckInl {
- // Add function to Target.Decls once only when we give it a name
- Target.Decls = append(Target.Decls, fn)
+ clo.SetType(fn.Type())
+
+ target := Target
+ if inTypeCheckInl {
+ // We're typechecking an imported function, so it's not actually
+ // part of Target. Skip adding it to Target.Decls so we don't
+ // compile it again.
+ target = nil
}
+
+ return ir.UseClosure(clo, target)
}
// type check function definition
@@ -390,10 +315,6 @@ func tcFunc(n *ir.Func) {
// tcCall typechecks an OCALL node.
func tcCall(n *ir.CallExpr, top int) ir.Node {
- n.Use = ir.CallUseExpr
- if top == ctxStmt {
- n.Use = ir.CallUseStmt
- }
Stmts(n.Init()) // imported rewritten f(g()) calls (#30907)
n.X = typecheck(n.X, ctxExpr|ctxType|ctxCallee)
if n.X.Diag() {
@@ -509,6 +430,7 @@ func tcCall(n *ir.CallExpr, top int) ir.Node {
}
typecheckaste(ir.OCALL, n.X, n.IsDDD, t.Params(), n.Args, func() string { return fmt.Sprintf("argument to %v", n.X) })
+ FixMethodCall(n)
if t.NumResults() == 0 {
return n
}
@@ -979,6 +901,21 @@ func tcRecover(n *ir.CallExpr) ir.Node {
return n
}
+// tcRecoverFP typechecks an ORECOVERFP node.
+func tcRecoverFP(n *ir.CallExpr) ir.Node {
+ if len(n.Args) != 1 {
+ base.FatalfAt(n.Pos(), "wrong number of arguments: %v", n)
+ }
+
+ n.Args[0] = Expr(n.Args[0])
+ if !n.Args[0].Type().IsPtrShaped() {
+ base.FatalfAt(n.Pos(), "%L is not pointer shaped", n.Args[0])
+ }
+
+ n.SetType(types.Types[types.TINTER])
+ return n
+}
+
// tcUnsafeAdd typechecks an OUNSAFEADD node.
func tcUnsafeAdd(n *ir.BinaryExpr) *ir.BinaryExpr {
if !types.AllowsGoVersion(curpkg(), 1, 17) {
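FixMethodCall performs at the IR level the same rewrite that is always legal at the source level: a method call t.M(args) becomes the function call T.M(t, args) on the method expression, while OMETHVALUE (the renamed OCALLPART) is the distinct case of a method value that binds its receiver. Both forms, side by side:

package main

import "fmt"

type counter struct{ n int }

func (c *counter) Add(d int) int { c.n += d; return c.n }

func main() {
    c := &counter{}

    fmt.Println(c.Add(2))             // method call form
    fmt.Println((*counter).Add(c, 3)) // rewritten form: receiver becomes the first argument

    add := c.Add // a method value (OMETHVALUE): the receiver is bound here
    fmt.Println(add(4))
}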
diff --git a/src/cmd/compile/internal/typecheck/iexport.go b/src/cmd/compile/internal/typecheck/iexport.go
index 64d68ef625..75d6115783 100644
--- a/src/cmd/compile/internal/typecheck/iexport.go
+++ b/src/cmd/compile/internal/typecheck/iexport.go
@@ -173,6 +173,8 @@
// }
//
//
+// TODO(danscales): fill in doc for 'type TypeParamType' and 'type InstType'
+//
// type Signature struct {
// Params []Param
// Results []Param
@@ -202,7 +204,6 @@
package typecheck
import (
- "bufio"
"bytes"
"crypto/md5"
"encoding/binary"
@@ -221,9 +222,18 @@ import (
)
// Current indexed export format version. Increase with each format change.
-// 1: added column details to Pos
// 0: Go1.11 encoding
-const iexportVersion = 1
+// 1: added column details to Pos
+// 2: added information for generic function/types (currently unstable)
+const (
+ iexportVersionGo1_11 = 0
+ iexportVersionPosCol = 1
+ // TODO: before release, change this back to 2. Kept at previous version
+ // for now (for testing).
+ iexportVersionGenerics = iexportVersionPosCol
+
+ iexportVersionCurrent = iexportVersionGenerics
+)
// predeclReserved is the number of type offsets reserved for types
// implicitly declared in the universe block.
@@ -244,6 +254,9 @@ const (
signatureType
structType
interfaceType
+ typeParamType
+ instType
+ unionType
)
const (
@@ -251,13 +264,22 @@ const (
magic = 0x6742937dc293105
)
-func WriteExports(out *bufio.Writer) {
+// WriteExports writes the indexed export format to out. If extensions
+// is true, then the compiler-only extensions are included.
+func WriteExports(out io.Writer, extensions bool) {
+ if extensions {
+ // If we're exporting inline bodies, invoke the crawler to mark
+ // which bodies to include.
+ crawlExports(Target.Exports)
+ }
+
p := iexporter{
allPkgs: map[*types.Pkg]bool{},
stringIndex: map[string]uint64{},
declIndex: map[*types.Sym]uint64{},
inlineIndex: map[*types.Sym]uint64{},
typIndex: map[*types.Type]uint64{},
+ extensions: extensions,
}
for i, pt := range predeclared() {
@@ -293,7 +315,7 @@ func WriteExports(out *bufio.Writer) {
// Assemble header.
var hdr intWriter
hdr.WriteByte('i')
- hdr.uint64(iexportVersion)
+ hdr.uint64(iexportVersionCurrent)
hdr.uint64(uint64(p.strings.Len()))
hdr.uint64(dataLen)
@@ -379,6 +401,8 @@ type iexporter struct {
declIndex map[*types.Sym]uint64
inlineIndex map[*types.Sym]uint64
typIndex map[*types.Type]uint64
+
+ extensions bool
}
// stringOff returns the offset of s within the string section.
@@ -449,7 +473,9 @@ func (p *iexporter) doDecl(n *ir.Name) {
w.tag('V')
w.pos(n.Pos())
w.typ(n.Type())
- w.varExt(n)
+ if w.p.extensions {
+ w.varExt(n)
+ }
case ir.PFUNC:
if ir.IsMethod(n) {
@@ -457,10 +483,25 @@ func (p *iexporter) doDecl(n *ir.Name) {
}
// Function.
- w.tag('F')
+ if n.Type().TParams().NumFields() == 0 {
+ w.tag('F')
+ } else {
+ w.tag('G')
+ }
w.pos(n.Pos())
+ // The tparam list of the function type is the
+ // declaration of the type params. So, write out the type
+ // params right now. Then those type params will be
+ // referenced via their type offset (via typOff) in all
+ // other places in the signature and function that they
+ // are used.
+ if n.Type().TParams().NumFields() > 0 {
+ w.tparamList(n.Type().TParams().FieldSlice())
+ }
w.signature(n.Type())
- w.funcExt(n)
+ if w.p.extensions {
+ w.funcExt(n)
+ }
default:
base.Fatalf("unexpected class: %v, %v", n, n.Class)
@@ -476,10 +517,25 @@ func (p *iexporter) doDecl(n *ir.Name) {
w.tag('C')
w.pos(n.Pos())
w.value(n.Type(), n.Val())
- w.constExt(n)
+ if w.p.extensions {
+ w.constExt(n)
+ }
case ir.OTYPE:
- if types.IsDotAlias(n.Sym()) {
+ if n.Type().IsTypeParam() && n.Type().Underlying() == n.Type() {
+ // Even though it has local scope, a typeparam requires a
+ // declaration via its package and unique name, because it
+ // may be referenced within its type bound during its own
+ // definition.
+ w.tag('P')
+ // A typeparam has a name, and has a type bound rather
+ // than an underlying type.
+ w.pos(n.Pos())
+ w.typ(n.Type().Bound())
+ break
+ }
+
+ if n.Alias() {
// Alias.
w.tag('A')
w.pos(n.Pos())
@@ -488,9 +544,18 @@ func (p *iexporter) doDecl(n *ir.Name) {
}
// Defined type.
- w.tag('T')
+ if len(n.Type().RParams()) == 0 {
+ w.tag('T')
+ } else {
+ w.tag('U')
+ }
w.pos(n.Pos())
+ if len(n.Type().RParams()) > 0 {
+ // Export type parameters, if any, needed for this type
+ w.typeList(n.Type().RParams())
+ }
+
underlying := n.Type().Underlying()
if underlying == types.ErrorType.Underlying() {
// For "type T error", use error as the
@@ -505,22 +570,31 @@ func (p *iexporter) doDecl(n *ir.Name) {
t := n.Type()
if t.IsInterface() {
- w.typeExt(t)
+ if w.p.extensions {
+ w.typeExt(t)
+ }
break
}
- ms := t.Methods()
- w.uint64(uint64(ms.Len()))
- for _, m := range ms.Slice() {
+ // Sort methods, for consistency with types2.
+ methods := append([]*types.Field(nil), t.Methods().Slice()...)
+ if base.Debug.UnifiedQuirks != 0 {
+ sort.Sort(types.MethodsByName(methods))
+ }
+
+ w.uint64(uint64(len(methods)))
+ for _, m := range methods {
w.pos(m.Pos)
w.selector(m.Sym)
w.param(m.Type.Recv())
w.signature(m.Type)
}
- w.typeExt(t)
- for _, m := range ms.Slice() {
- w.methExt(m)
+ if w.p.extensions {
+ w.typeExt(t)
+ for _, m := range methods {
+ w.methExt(m)
+ }
}
default:
@@ -803,8 +877,46 @@ func (w *exportWriter) startType(k itag) {
}
func (w *exportWriter) doTyp(t *types.Type) {
- if t.Sym() != nil {
- if t.Sym().Pkg == types.BuiltinPkg || t.Sym().Pkg == ir.Pkgs.Unsafe {
+ s := t.Sym()
+ if s != nil && t.OrigSym != nil {
+ assert(base.Flag.G > 0)
+ // This is an instantiated type - could be a re-instantiation like
+ // Value[T2] or a full instantiation like Value[int].
+ if strings.Index(s.Name, "[") < 0 {
+ base.Fatalf("incorrect name for instantiated type")
+ }
+ w.startType(instType)
+ w.pos(t.Pos())
+ // Export the type arguments for the instantiated type. The
+ // instantiated type could be in a method header (e.g. "func (v
+ // *Value[T2]) set (...) { ... }"), so the type args are "new"
+ // typeparams. Or the instantiated type could be in a
+ // function/method body, so the type args are either concrete
+ // types or existing typeparams from the function/method header.
+ w.typeList(t.RParams())
+ // Export a reference to the base type.
+ baseType := t.OrigSym.Def.(*ir.Name).Type()
+ w.typ(baseType)
+ return
+ }
+
+ // The 't.Underlying() == t' check is to confirm this is a base typeparam
+ // type, rather than a defined type with typeparam underlying type, like:
+ // type orderedAbs[T any] T
+ if t.IsTypeParam() && t.Underlying() == t {
+ assert(base.Flag.G > 0)
+ if s.Pkg == types.BuiltinPkg || s.Pkg == ir.Pkgs.Unsafe {
+ base.Fatalf("builtin type missing from typIndex: %v", t)
+ }
+ // Write out the first use of a type param as a qualified ident.
+ // This will force a "declaration" of the type param.
+ w.startType(typeParamType)
+ w.qualifiedIdent(t.Obj().(*ir.Name))
+ return
+ }
+
+ if s != nil {
+ if s.Pkg == types.BuiltinPkg || s.Pkg == ir.Pkgs.Unsafe {
base.Fatalf("builtin type missing from typIndex: %v", t)
}
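Illustrative sketch (not part of the patch) of source that exercises the instType and typeParamType cases added above; the names here are hypothetical:

    package example

    type Value[T any] struct{ v T }

    // Value[T2] in the method header is a re-instantiation: OrigSym != nil, so
    // doTyp writes instType with type args [T2] and a reference to the base
    // type Value. T2 itself is a bare typeparam (Underlying() == itself) and is
    // written as typeParamType via its qualified ident.
    func (v *Value[T2]) Set(x T2) { v.v = x }

    // Value[int] is a full instantiation: instType with type args [int] and
    // the same base-type reference.
    func NewIntValue() Value[int] { return Value[int]{} }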
@@ -865,6 +977,14 @@ func (w *exportWriter) doTyp(t *types.Type) {
}
}
+ // Sort methods and embedded types, for consistency with types2.
+ // Note: embedded types may be anonymous, and types2 sorts them
+ // with sort.Stable too.
+ if base.Debug.UnifiedQuirks != 0 {
+ sort.Sort(types.MethodsByName(methods))
+ sort.Stable(types.EmbeddedsByName(embeddeds))
+ }
+
w.startType(interfaceType)
w.setPkg(t.Pkg(), true)
@@ -881,6 +1001,19 @@ func (w *exportWriter) doTyp(t *types.Type) {
w.signature(f.Type)
}
+ case types.TUNION:
+ assert(base.Flag.G > 0)
+ // TODO(danscales): possibly put out the tilde bools in more
+ // compact form.
+ w.startType(unionType)
+ nt := t.NumTerms()
+ w.uint64(uint64(nt))
+ for i := 0; i < nt; i++ {
+ typ, tilde := t.Term(i)
+ w.bool(tilde)
+ w.typ(typ)
+ }
+
default:
base.Fatalf("unexpected type: %v", t)
}
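For reference, a hypothetical constraint that reaches the TUNION case above; each term is written as its tilde flag followed by its type:

    package example

    // Exported as unionType with two terms:
    //   term 0: tilde=true,  type int
    //   term 1: tilde=false, type float64
    type Number interface {
        ~int | float64
    }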
@@ -906,6 +1039,23 @@ func (w *exportWriter) signature(t *types.Type) {
}
}
+func (w *exportWriter) typeList(ts []*types.Type) {
+ w.uint64(uint64(len(ts)))
+ for _, rparam := range ts {
+ w.typ(rparam)
+ }
+}
+
+func (w *exportWriter) tparamList(fs []*types.Field) {
+ w.uint64(uint64(len(fs)))
+ for _, f := range fs {
+ if !f.Type.IsTypeParam() {
+ base.Fatalf("unexpected non-typeparam")
+ }
+ w.typ(f.Type)
+ }
+}
+
func (w *exportWriter) paramList(fs []*types.Field) {
w.uint64(uint64(len(fs)))
for _, f := range fs {
@@ -948,26 +1098,50 @@ func constTypeOf(typ *types.Type) constant.Kind {
}
func (w *exportWriter) value(typ *types.Type, v constant.Value) {
- ir.AssertValidTypeForConst(typ, v)
w.typ(typ)
+ var kind constant.Kind
+ var valType *types.Type
+
+ if typ.IsTypeParam() {
+ // A constant will have a TYPEPARAM type if it appears in a place
+ // where it must match that typeparam type (e.g. in a binary
+ // operation with a variable of that typeparam type). If so, then
+ // we must write out its actual constant kind as well, so its
+ // constant val can be read in properly during import.
+ kind = v.Kind()
+ w.int64(int64(kind))
+
+ switch kind {
+ case constant.Int:
+ valType = types.Types[types.TINT64]
+ case constant.Float:
+ valType = types.Types[types.TFLOAT64]
+ case constant.Complex:
+ valType = types.Types[types.TCOMPLEX128]
+ }
+ } else {
+ ir.AssertValidTypeForConst(typ, v)
+ kind = constTypeOf(typ)
+ valType = typ
+ }
- // Each type has only one admissible constant representation,
- // so we could type switch directly on v.U here. However,
- // switching on the type increases symmetry with import logic
- // and provides a useful consistency check.
+ // Each type has only one admissible constant representation, so we could
+ // type switch directly on v.Kind() here. However, switching on the type
+ // (in the non-typeparam case) increases symmetry with import logic and
+ // provides a useful consistency check.
- switch constTypeOf(typ) {
+ switch kind {
case constant.Bool:
w.bool(constant.BoolVal(v))
case constant.String:
w.string(constant.StringVal(v))
case constant.Int:
- w.mpint(v, typ)
+ w.mpint(v, valType)
case constant.Float:
- w.mpfloat(v, typ)
+ w.mpfloat(v, valType)
case constant.Complex:
- w.mpfloat(constant.Real(v), typ)
- w.mpfloat(constant.Imag(v), typ)
+ w.mpfloat(constant.Real(v), valType)
+ w.mpfloat(constant.Imag(v), valType)
}
}
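An illustrative example (not part of the patch) of a constant that reaches value() with a TYPEPARAM type, so its constant.Kind has to be written out explicitly:

    package example

    func AddOne[T int | float64](x T) T {
        // The untyped constant 1 takes the typeparam type T here, so the
        // exporter records its kind (constant.Int) before the value itself.
        return x + 1
    }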
@@ -1185,10 +1359,14 @@ func (w *exportWriter) funcExt(n *ir.Name) {
}
}
- // Inline body.
+ // Write out inline body or body of a generic function/method.
+ if n.Type().HasTParam() && n.Func.Body != nil && n.Func.Inl == nil {
+ base.FatalfAt(n.Pos(), "generic function is not marked inlineable")
+ }
if n.Func.Inl != nil {
w.uint64(1 + uint64(n.Func.Inl.Cost))
- if n.Func.ExportInline() {
+ w.bool(n.Func.Inl.CanDelayResults)
+ if n.Func.ExportInline() || n.Type().HasTParam() {
w.p.doInline(n)
}
@@ -1432,7 +1610,12 @@ func (w *exportWriter) commList(cases []*ir.CommClause) {
w.uint64(uint64(len(cases)))
for _, cas := range cases {
w.pos(cas.Pos())
- w.node(cas.Comm)
+ defaultCase := cas.Comm == nil
+ w.bool(defaultCase)
+ if !defaultCase {
+ // Only call w.node for a non-default case (cas.Comm is non-nil)
+ w.node(cas.Comm)
+ }
w.stmtList(cas.Body)
}
}
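A sketch of the select statements the commList change covers (not part of the patch): a default clause has a nil Comm, which w.node cannot encode, so the exporter now writes a bool first.

    package example

    func drain(ch chan int) int {
        select {
        case v := <-ch: // Comm != nil: exporter writes false, then the node
            return v
        default: // Comm == nil: exporter writes true and skips w.node
            return 0
        }
    }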
@@ -1460,7 +1643,9 @@ func (w *exportWriter) expr(n ir.Node) {
// (somewhat closely following the structure of exprfmt in fmt.go)
case ir.ONIL:
n := n.(*ir.NilExpr)
- if !n.Type().HasNil() {
+ // If n is a typeparam, it will have already been checked
+ // for proper use by the types2 typechecker.
+ if !n.Type().IsTypeParam() && !n.Type().HasNil() {
base.Fatalf("unexpected type for nil: %v", n.Type())
}
w.op(ir.ONIL)
@@ -1469,7 +1654,11 @@ func (w *exportWriter) expr(n ir.Node) {
case ir.OLITERAL:
w.op(ir.OLITERAL)
- w.pos(n.Pos())
+ if ir.HasUniquePos(n) {
+ w.pos(n.Pos())
+ } else {
+ w.pos(src.NoXPos)
+ }
w.value(n.Type(), n.Val())
case ir.ONAME:
@@ -1488,6 +1677,16 @@ func (w *exportWriter) expr(n ir.Node) {
// We don't need a type here, as the type will be provided at the
// declaration of n.
w.op(ir.ONAME)
+
+ // This handles the case where we haven't yet transformed a call
+ // to a builtin, so we must write out the builtin as a name in the
+ // builtin package.
+ isBuiltin := n.BuiltinOp != ir.OXXX
+ w.bool(isBuiltin)
+ if isBuiltin {
+ w.string(n.Sym().Name)
+ break
+ }
w.localName(n)
// case OPACK, ONONAME:
@@ -1585,12 +1784,11 @@ func (w *exportWriter) expr(n ir.Node) {
// case OSTRUCTKEY:
// unreachable - handled in case OSTRUCTLIT by elemList
- case ir.OXDOT, ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH, ir.OCALLPART, ir.OMETHEXPR:
+ case ir.OXDOT, ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH, ir.OMETHVALUE, ir.OMETHEXPR:
n := n.(*ir.SelectorExpr)
if go117ExportTypes {
- if n.Op() == ir.OXDOT {
- base.Fatalf("shouldn't encounter XDOT in new exporter")
- }
+ // For go117ExportTypes, we usually see all ops except
+ // OXDOT, but we can see OXDOT for generic functions.
w.op(n.Op())
} else {
w.op(ir.OXDOT)
@@ -1600,11 +1798,16 @@ func (w *exportWriter) expr(n ir.Node) {
w.exoticSelector(n.Sel)
if go117ExportTypes {
w.exoticType(n.Type())
- if n.Op() == ir.ODOT || n.Op() == ir.ODOTPTR || n.Op() == ir.ODOTINTER {
+ if n.Op() == ir.OXDOT {
+ // n.Selection for method references will be
+ // reconstructed during import.
+ w.bool(n.Selection != nil)
+ } else if n.Op() == ir.ODOT || n.Op() == ir.ODOTPTR || n.Op() == ir.ODOTINTER {
w.exoticField(n.Selection)
}
- // n.Selection is not required for OMETHEXPR, ODOTMETH, and OCALLPART. It will
- // be reconstructed during import.
+ // n.Selection is not required for OMETHEXPR, ODOTMETH, and OMETHVALUE. It will
+ // be reconstructed during import. n.Selection is computed during
+ // transformDot() for OXDOT.
}
case ir.ODOTTYPE, ir.ODOTTYPE2:
@@ -1629,7 +1832,7 @@ func (w *exportWriter) expr(n ir.Node) {
w.expr(n.X)
w.expr(n.Index)
if go117ExportTypes {
- w.typ(n.Type())
+ w.exoticType(n.Type())
if n.Op() == ir.OINDEXMAP {
w.bool(n.Assigned)
}
@@ -1677,7 +1880,7 @@ func (w *exportWriter) expr(n ir.Node) {
w.op(ir.OEND)
}
- case ir.OCONV, ir.OCONVIFACE, ir.OCONVNOP, ir.OBYTES2STR, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2RUNES, ir.ORUNESTR, ir.OSLICE2ARRPTR:
+ case ir.OCONV, ir.OCONVIFACE, ir.OCONVIDATA, ir.OCONVNOP, ir.OBYTES2STR, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2RUNES, ir.ORUNESTR, ir.OSLICE2ARRPTR:
n := n.(*ir.ConvExpr)
if go117ExportTypes {
w.op(n.Op())
@@ -1732,7 +1935,6 @@ func (w *exportWriter) expr(n ir.Node) {
w.bool(n.IsDDD)
if go117ExportTypes {
w.exoticType(n.Type())
- w.uint64(uint64(n.Use))
}
case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
@@ -1759,8 +1961,16 @@ func (w *exportWriter) expr(n ir.Node) {
w.op(ir.OEND)
}
+ case ir.OLINKSYMOFFSET:
+ n := n.(*ir.LinksymOffsetExpr)
+ w.op(ir.OLINKSYMOFFSET)
+ w.pos(n.Pos())
+ w.string(n.Linksym.Name)
+ w.uint64(uint64(n.Offset_))
+ w.typ(n.Type())
+
// unary expressions
- case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.ORECV:
+ case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.ORECV, ir.OIDATA:
n := n.(*ir.UnaryExpr)
w.op(n.Op())
w.pos(n.Pos())
@@ -1796,7 +2006,7 @@ func (w *exportWriter) expr(n ir.Node) {
// binary expressions
case ir.OADD, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT,
- ir.OLSH, ir.OMOD, ir.OMUL, ir.ONE, ir.OOR, ir.ORSH, ir.OSUB, ir.OXOR:
+ ir.OLSH, ir.OMOD, ir.OMUL, ir.ONE, ir.OOR, ir.ORSH, ir.OSUB, ir.OXOR, ir.OEFACE:
n := n.(*ir.BinaryExpr)
w.op(n.Op())
w.pos(n.Pos())
@@ -1829,6 +2039,26 @@ func (w *exportWriter) expr(n ir.Node) {
// if exporting, DCLCONST should just be removed as its usage
// has already been replaced with literals
+ case ir.OFUNCINST:
+ n := n.(*ir.InstExpr)
+ w.op(ir.OFUNCINST)
+ w.pos(n.Pos())
+ w.expr(n.X)
+ w.uint64(uint64(len(n.Targs)))
+ for _, targ := range n.Targs {
+ w.typ(targ.Type())
+ }
+ if go117ExportTypes {
+ w.typ(n.Type())
+ }
+
+ case ir.OSELRECV2:
+ n := n.(*ir.AssignListStmt)
+ w.op(ir.OSELRECV2)
+ w.pos(n.Pos())
+ w.exprList(n.Lhs)
+ w.exprList(n.Rhs)
+
default:
base.Fatalf("cannot export %v (%d) node\n"+
"\t==> please file an issue and assign to gri@", n.Op(), int(n.Op()))
@@ -1864,11 +2094,8 @@ func (w *exportWriter) fieldList(list ir.Nodes) {
for _, n := range list {
n := n.(*ir.StructKeyExpr)
w.pos(n.Pos())
- w.selector(n.Field)
+ w.exoticField(n.Field)
w.expr(n.Value)
- if go117ExportTypes {
- w.uint64(uint64(n.Offset))
- }
}
}
@@ -1902,8 +2129,15 @@ func (w *exportWriter) localIdent(s *types.Sym) {
return
}
- // TODO(mdempsky): Fix autotmp hack.
- if i := strings.LastIndex(name, "."); i >= 0 && !strings.HasPrefix(name, ".autotmp_") {
+ // The names of autotmp variables aren't important; they just need to
+ // be unique. To stabilize the export data, simply write out "$" as
+ // a marker and let the importer generate its own unique name.
+ if strings.HasPrefix(name, ".autotmp_") {
+ w.string("$autotmp")
+ return
+ }
+
+ if i := strings.LastIndex(name, "."); i >= 0 && !strings.HasPrefix(name, ".dict") { // TODO: just use autotmp names for dictionaries?
base.Fatalf("unexpected dot in identifier: %v", name)
}
diff --git a/src/cmd/compile/internal/typecheck/iimport.go b/src/cmd/compile/internal/typecheck/iimport.go
index 37f5a7bba0..a1a3ac3e8a 100644
--- a/src/cmd/compile/internal/typecheck/iimport.go
+++ b/src/cmd/compile/internal/typecheck/iimport.go
@@ -11,7 +11,6 @@ import (
"encoding/binary"
"fmt"
"go/constant"
- "io"
"math/big"
"os"
"strings"
@@ -19,8 +18,6 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
- "cmd/internal/bio"
- "cmd/internal/goobj"
"cmd/internal/obj"
"cmd/internal/src"
)
@@ -94,7 +91,7 @@ func importReaderFor(sym *types.Sym, importers map[*types.Sym]iimporterAndOffset
}
type intReader struct {
- *bio.Reader
+ *strings.Reader
pkg *types.Pkg
}
@@ -116,33 +113,34 @@ func (r *intReader) uint64() uint64 {
return i
}
-func ReadImports(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType) {
- ird := &intReader{in, pkg}
+func ReadImports(pkg *types.Pkg, data string) {
+ ird := &intReader{strings.NewReader(data), pkg}
version := ird.uint64()
- if version != iexportVersion {
- base.Errorf("import %q: unknown export format version %d", pkg.Path, version)
+ switch version {
+ case /* iexportVersionGenerics, */ iexportVersionPosCol, iexportVersionGo1_11:
+ default:
+ if version > iexportVersionGenerics {
+ base.Errorf("import %q: unstable export format version %d, just recompile", pkg.Path, version)
+ } else {
+ base.Errorf("import %q: unknown export format version %d", pkg.Path, version)
+ }
base.ErrorExit()
}
- sLen := ird.uint64()
- dLen := ird.uint64()
-
- // Map string (and data) section into memory as a single large
- // string. This reduces heap fragmentation and allows
- // returning individual substrings very efficiently.
- data, err := mapFile(in.File(), in.Offset(), int64(sLen+dLen))
- if err != nil {
- base.Errorf("import %q: mapping input: %v", pkg.Path, err)
- base.ErrorExit()
- }
- stringData := data[:sLen]
- declData := data[sLen:]
+ sLen := int64(ird.uint64())
+ dLen := int64(ird.uint64())
- in.MustSeek(int64(sLen+dLen), os.SEEK_CUR)
+ // TODO(mdempsky): Replace os.SEEK_CUR with io.SeekCurrent after
+ // #44505 is fixed.
+ whence, _ := ird.Seek(0, os.SEEK_CUR)
+ stringData := data[whence : whence+sLen]
+ declData := data[whence+sLen : whence+sLen+dLen]
+ ird.Seek(sLen+dLen, os.SEEK_CUR)
p := &iimporter{
- ipkg: pkg,
+ exportVersion: version,
+ ipkg: pkg,
pkgCache: map[uint64]*types.Pkg{},
posBaseCache: map[uint64]*src.PosBase{},
@@ -200,18 +198,11 @@ func ReadImports(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintT
}
}
}
-
- // Fingerprint.
- _, err = io.ReadFull(in, fingerprint[:])
- if err != nil {
- base.Errorf("import %s: error reading fingerprint", pkg.Path)
- base.ErrorExit()
- }
- return fingerprint
}
type iimporter struct {
- ipkg *types.Pkg
+ exportVersion uint64
+ ipkg *types.Pkg
pkgCache map[uint64]*types.Pkg
posBaseCache map[uint64]*src.PosBase
@@ -273,6 +264,7 @@ type importReader struct {
// Slice of all dcls for function, including any interior closures
allDcls []*ir.Name
allClosureVars []*ir.Name
+ autotmpgen int
}
func (p *iimporter) newReader(off uint64, pkg *types.Pkg) *importReader {
@@ -302,37 +294,53 @@ func (r *importReader) doDecl(sym *types.Sym) *ir.Name {
case 'A':
typ := r.typ()
- return importalias(r.p.ipkg, pos, sym, typ)
+ return importalias(pos, sym, typ)
case 'C':
typ := r.typ()
val := r.value(typ)
- n := importconst(r.p.ipkg, pos, sym, typ, val)
+ n := importconst(pos, sym, typ, val)
r.constExt(n)
return n
- case 'F':
- typ := r.signature(nil)
+ case 'F', 'G':
+ var tparams []*types.Field
+ if tag == 'G' {
+ tparams = r.tparamList()
+ }
+ typ := r.signature(nil, tparams)
- n := importfunc(r.p.ipkg, pos, sym, typ)
+ n := importfunc(pos, sym, typ)
r.funcExt(n)
return n
- case 'T':
+ case 'T', 'U':
+ var rparams []*types.Type
+ if tag == 'U' {
+ rparams = r.typeList()
+ }
+
// Types can be recursive. We need to setup a stub
// declaration before recursing.
- n := importtype(r.p.ipkg, pos, sym)
+ n := importtype(pos, sym)
t := n.Type()
+ if tag == 'U' {
+ t.SetRParams(rparams)
+ }
// We also need to defer width calculations until
// after the underlying type has been assigned.
types.DeferCheckSize()
+ deferDoInst()
underlying := r.typ()
t.SetUnderlying(underlying)
- types.ResumeCheckSize()
if underlying.IsInterface() {
+ // Finish up all type instantiations and CheckSize calls
+ // now that a top-level type is fully constructed.
+ resumeDoInst()
+ types.ResumeCheckSize()
r.typeExt(t)
return n
}
@@ -342,7 +350,7 @@ func (r *importReader) doDecl(sym *types.Sym) *ir.Name {
mpos := r.pos()
msym := r.selector()
recv := r.param()
- mtyp := r.signature(recv)
+ mtyp := r.signature(recv, nil)
// MethodSym already marked m.Sym as a function.
m := ir.NewNameAt(mpos, ir.MethodSym(recv.Type, msym))
@@ -358,16 +366,43 @@ func (r *importReader) doDecl(sym *types.Sym) *ir.Name {
}
t.Methods().Set(ms)
+ // Finish up all instantiations and CheckSize calls now
+ // that a top-level type is fully constructed.
+ resumeDoInst()
+ types.ResumeCheckSize()
+
r.typeExt(t)
for _, m := range ms {
r.methExt(m)
}
return n
+ case 'P':
+ if r.p.exportVersion < iexportVersionGenerics {
+ base.Fatalf("unexpected type param type")
+ }
+ if sym.Def != nil {
+ // Make sure we use the same type param type for the same
+ // name, whether it is created during types1-import or
+ // this types2-to-types1 translation.
+ return sym.Def.(*ir.Name)
+ }
+ // The typeparam index is set at the point where the containing type
+ // param list is imported.
+ t := types.NewTypeParam(sym, 0)
+ // Nname needed to save the pos.
+ nname := ir.NewDeclNameAt(pos, ir.OTYPE, sym)
+ sym.Def = nname
+ nname.SetType(t)
+ t.SetNod(nname)
+
+ t.SetBound(r.typ())
+ return nname
+
case 'V':
typ := r.typ()
- n := importvar(r.p.ipkg, pos, sym, typ)
+ n := importvar(pos, sym, typ)
r.varExt(n)
return n
@@ -377,27 +412,47 @@ func (r *importReader) doDecl(sym *types.Sym) *ir.Name {
}
}
-func (p *importReader) value(typ *types.Type) constant.Value {
- switch constTypeOf(typ) {
+func (r *importReader) value(typ *types.Type) constant.Value {
+ var kind constant.Kind
+ var valType *types.Type
+
+ if typ.IsTypeParam() {
+ // If a constant had a typeparam type, then we wrote out its
+ // actual constant kind as well.
+ kind = constant.Kind(r.int64())
+ switch kind {
+ case constant.Int:
+ valType = types.Types[types.TINT64]
+ case constant.Float:
+ valType = types.Types[types.TFLOAT64]
+ case constant.Complex:
+ valType = types.Types[types.TCOMPLEX128]
+ }
+ } else {
+ kind = constTypeOf(typ)
+ valType = typ
+ }
+
+ switch kind {
case constant.Bool:
- return constant.MakeBool(p.bool())
+ return constant.MakeBool(r.bool())
case constant.String:
- return constant.MakeString(p.string())
+ return constant.MakeString(r.string())
case constant.Int:
var i big.Int
- p.mpint(&i, typ)
+ r.mpint(&i, valType)
return constant.Make(&i)
case constant.Float:
- return p.float(typ)
+ return r.float(valType)
case constant.Complex:
- return makeComplex(p.float(typ), p.float(typ))
+ return makeComplex(r.float(valType), r.float(valType))
}
base.Fatalf("unexpected value type: %v", typ)
panic("unreachable")
}
-func (p *importReader) mpint(x *big.Int, typ *types.Type) {
+func (r *importReader) mpint(x *big.Int, typ *types.Type) {
signed, maxBytes := intSize(typ)
maxSmall := 256 - maxBytes
@@ -408,7 +463,7 @@ func (p *importReader) mpint(x *big.Int, typ *types.Type) {
maxSmall = 256
}
- n, _ := p.ReadByte()
+ n, _ := r.ReadByte()
if uint(n) < maxSmall {
v := int64(n)
if signed {
@@ -429,30 +484,30 @@ func (p *importReader) mpint(x *big.Int, typ *types.Type) {
base.Fatalf("weird decoding: %v, %v => %v", n, signed, v)
}
b := make([]byte, v)
- p.Read(b)
+ r.Read(b)
x.SetBytes(b)
if signed && n&1 != 0 {
x.Neg(x)
}
}
-func (p *importReader) float(typ *types.Type) constant.Value {
+func (r *importReader) float(typ *types.Type) constant.Value {
var mant big.Int
- p.mpint(&mant, typ)
+ r.mpint(&mant, typ)
var f big.Float
f.SetInt(&mant)
if f.Sign() != 0 {
- f.SetMantExp(&f, int(p.int64()))
+ f.SetMantExp(&f, int(r.int64()))
}
return constant.Make(&f)
}
-func (p *importReader) mprat(orig constant.Value) constant.Value {
- if !p.bool() {
+func (r *importReader) mprat(orig constant.Value) constant.Value {
+ if !r.bool() {
return orig
}
var rat big.Rat
- rat.SetString(p.string())
+ rat.SetString(r.string())
return constant.Make(&rat)
}
@@ -462,8 +517,15 @@ func (r *importReader) ident(selector bool) *types.Sym {
return nil
}
pkg := r.currPkg
- if selector && types.IsExported(name) {
- pkg = types.LocalPkg
+ if selector {
+ if types.IsExported(name) {
+ pkg = types.LocalPkg
+ }
+ } else {
+ if name == "$autotmp" {
+ name = autotmpname(r.autotmpgen)
+ r.autotmpgen++
+ }
}
return pkg.Lookup(name)
}
@@ -503,7 +565,14 @@ func (r *importReader) pos() src.XPos {
}
func (r *importReader) typ() *types.Type {
- return r.p.typAt(r.uint64())
+ // If this is a top-level type call, defer type instantiations until the
+ // type is fully constructed.
+ types.DeferCheckSize()
+ deferDoInst()
+ t := r.p.typAt(r.uint64())
+ resumeDoInst()
+ types.ResumeCheckSize()
+ return t
}
func (r *importReader) exoticType() *types.Type {
@@ -641,7 +710,13 @@ func (p *iimporter) typAt(off uint64) *types.Type {
// are pushed to compile queue, then draining from the queue for compiling.
// During this process, the size calculation is disabled, so it is not safe for
// calculating size during SSA generation anymore. See issue #44732.
- types.CheckSize(t)
+ //
+ // No need to calc sizes for re-instantiated generic types, and
+ // they are not necessarily resolved until the top-level type is
+ // defined (because of recursive types).
+ if t.OrigSym == nil || !t.HasTParam() {
+ types.CheckSize(t)
+ }
p.typCache[off] = t
}
return t
@@ -680,7 +755,7 @@ func (r *importReader) typ1() *types.Type {
case signatureType:
r.setPkg()
- return r.signature(nil)
+ return r.signature(nil, nil)
case structType:
r.setPkg()
@@ -718,16 +793,64 @@ func (r *importReader) typ1() *types.Type {
for i := range methods {
pos := r.pos()
sym := r.selector()
- typ := r.signature(fakeRecvField())
+ typ := r.signature(fakeRecvField(), nil)
methods[i] = types.NewField(pos, sym, typ)
}
+ if len(embeddeds)+len(methods) == 0 {
+ return types.Types[types.TINTER]
+ }
+
t := types.NewInterface(r.currPkg, append(embeddeds, methods...))
// Ensure we expand the interface in the frontend (#25055).
types.CheckSize(t)
return t
+
+ case typeParamType:
+ if r.p.exportVersion < iexportVersionGenerics {
+ base.Fatalf("unexpected type param type")
+ }
+ // Similar to code for defined types, since we "declared"
+ // typeparams to deal with recursion (typeparam is used within its
+ // own type bound).
+ ident := r.qualifiedIdent()
+ if ident.Sym().Def != nil {
+ return ident.Sym().Def.(*ir.Name).Type()
+ }
+ n := expandDecl(ident)
+ if n.Op() != ir.OTYPE {
+ base.Fatalf("expected OTYPE, got %v: %v, %v", n.Op(), n.Sym(), n)
+ }
+ return n.Type()
+
+ case instType:
+ if r.p.exportVersion < iexportVersionGenerics {
+ base.Fatalf("unexpected instantiation type")
+ }
+ pos := r.pos()
+ len := r.uint64()
+ targs := make([]*types.Type, len)
+ for i := range targs {
+ targs[i] = r.typ()
+ }
+ baseType := r.typ()
+ t := Instantiate(pos, baseType, targs)
+ return t
+
+ case unionType:
+ if r.p.exportVersion < iexportVersionGenerics {
+ base.Fatalf("unexpected instantiation type")
+ }
+ nt := int(r.uint64())
+ terms := make([]*types.Type, nt)
+ tildes := make([]bool, nt)
+ for i := range terms {
+ tildes[i] = r.bool()
+ terms[i] = r.typ()
+ }
+ return types.NewUnion(terms, tildes)
}
}
@@ -735,13 +858,42 @@ func (r *importReader) kind() itag {
return itag(r.uint64())
}
-func (r *importReader) signature(recv *types.Field) *types.Type {
+func (r *importReader) signature(recv *types.Field, tparams []*types.Field) *types.Type {
params := r.paramList()
results := r.paramList()
if n := len(params); n > 0 {
params[n-1].SetIsDDD(r.bool())
}
- return types.NewSignature(r.currPkg, recv, nil, params, results)
+ return types.NewSignature(r.currPkg, recv, tparams, params, results)
+}
+
+func (r *importReader) typeList() []*types.Type {
+ n := r.uint64()
+ if n == 0 {
+ return nil
+ }
+ ts := make([]*types.Type, n)
+ for i := range ts {
+ ts[i] = r.typ()
+ if ts[i].IsTypeParam() {
+ ts[i].SetIndex(i)
+ }
+ }
+ return ts
+}
+
+func (r *importReader) tparamList() []*types.Field {
+ n := r.uint64()
+ if n == 0 {
+ return nil
+ }
+ fs := make([]*types.Field, n)
+ for i := range fs {
+ typ := r.typ()
+ typ.SetIndex(i)
+ fs[i] = types.NewField(typ.Pos(), typ.Sym(), typ)
+ }
+ return fs
}
func (r *importReader) paramList() []*types.Field {
@@ -809,7 +961,9 @@ func (r *importReader) funcExt(n *ir.Name) {
n.Func.ABI = obj.ABI(r.uint64())
- n.SetPragma(ir.PragmaFlag(r.uint64()))
+ // Make sure the //go:noinline pragma is imported (so stenciled functions
+ // have the same noinline status as the corresponding generic function).
+ n.Func.Pragma = ir.PragmaFlag(r.uint64())
// Escape analysis.
for _, fs := range &types.RecvsParams {
@@ -821,7 +975,8 @@ func (r *importReader) funcExt(n *ir.Name) {
// Inline body.
if u := r.uint64(); u > 0 {
n.Func.Inl = &ir.Inline{
- Cost: int32(u - 1),
+ Cost: int32(u - 1),
+ CanDelayResults: r.bool(),
}
n.Func.Endlineno = r.pos()
}
@@ -852,7 +1007,13 @@ func (r *importReader) symIdx(s *types.Sym) {
func (r *importReader) typeExt(t *types.Type) {
t.SetNotInHeap(r.bool())
- i, pi := r.int64(), r.int64()
+ SetBaseTypeIndex(t, r.int64(), r.int64())
+}
+
+func SetBaseTypeIndex(t *types.Type, i, pi int64) {
+ if t.Obj() == nil {
+ base.Fatalf("SetBaseTypeIndex on non-defined type %v", t)
+ }
if i != -1 && pi != -1 {
typeSymIdx[t] = [2]int64{i, pi}
}
@@ -860,6 +1021,7 @@ func (r *importReader) typeExt(t *types.Type) {
// Map imported type T to the index of type descriptor symbols of T and *T,
// so we can use index to reference the symbol.
+// TODO(mdempsky): Store this information directly in the Type's Name.
var typeSymIdx = make(map[*types.Type][2]int64)
func BaseTypeIndex(t *types.Type) int64 {
@@ -936,6 +1098,10 @@ func (r *importReader) funcBody(fn *ir.Func) {
fn.Inl.Body = body
r.curfn = outerfn
+ if base.Flag.W >= 3 {
+ fmt.Printf("Imported for %v", fn)
+ ir.DumpList("", fn.Inl.Body)
+ }
}
func (r *importReader) readNames(fn *ir.Func) []*ir.Name {
@@ -1032,7 +1198,13 @@ func (r *importReader) caseList(switchExpr ir.Node) []*ir.CaseClause {
func (r *importReader) commList() []*ir.CommClause {
cases := make([]*ir.CommClause, r.uint64())
for i := range cases {
- cases[i] = ir.NewCommStmt(r.pos(), r.node(), r.stmtList())
+ pos := r.pos()
+ defaultCase := r.bool()
+ var comm ir.Node
+ if !defaultCase {
+ comm = r.node()
+ }
+ cases[i] = ir.NewCommStmt(pos, comm, r.stmtList())
}
return cases
}
@@ -1095,6 +1267,10 @@ func (r *importReader) node() ir.Node {
return n
case ir.ONAME:
+ isBuiltin := r.bool()
+ if isBuiltin {
+ return types.BuiltinPkg.Lookup(r.string()).Def.(*ir.Name)
+ }
return r.localName()
// case OPACK, ONONAME:
@@ -1117,28 +1293,18 @@ func (r *importReader) node() ir.Node {
case ir.OCLOSURE:
//println("Importing CLOSURE")
pos := r.pos()
- typ := r.signature(nil)
+ typ := r.signature(nil, nil)
// All the remaining code below is similar to (*noder).funcLit(), but
// with Dcls and ClosureVars lists already set up
- fn := ir.NewFunc(pos)
- fn.SetIsHiddenClosure(true)
- fn.Nname = ir.NewNameAt(pos, ir.BlankNode.Sym())
- fn.Nname.Func = fn
- fn.Nname.Ntype = ir.TypeNode(typ)
- fn.Nname.Defn = fn
+ fn := ir.NewClosureFunc(pos, true)
fn.Nname.SetType(typ)
cvars := make([]*ir.Name, r.int64())
for i := range cvars {
cvars[i] = ir.CaptureName(r.pos(), fn, r.localName().Canonical())
- if go117ExportTypes {
- if cvars[i].Type() != nil || cvars[i].Defn == nil {
- base.Fatalf("bad import of closure variable")
- }
- // Closure variable should have Defn set, which is its captured
- // variable, and it gets the same type as the captured variable.
- cvars[i].SetType(cvars[i].Defn.Type())
+ if go117ExportTypes && cvars[i].Defn == nil {
+ base.Fatalf("bad import of closure variable")
}
}
fn.ClosureVars = cvars
@@ -1159,12 +1325,10 @@ func (r *importReader) node() ir.Node {
ir.FinishCaptureNames(pos, r.curfn, fn)
- clo := ir.NewClosureExpr(pos, fn)
- fn.OClosure = clo
+ clo := fn.OClosure
if go117ExportTypes {
clo.SetType(typ)
}
-
return clo
case ir.OSTRUCTLIT:
@@ -1202,35 +1366,54 @@ func (r *importReader) node() ir.Node {
// case OSTRUCTKEY:
// unreachable - handled in case OSTRUCTLIT by elemList
- case ir.OXDOT:
- // see parser.new_dotname
- if go117ExportTypes {
- base.Fatalf("shouldn't encounter XDOT in new importer")
- }
- return ir.NewSelectorExpr(r.pos(), ir.OXDOT, r.expr(), r.exoticSelector())
-
- case ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH, ir.OCALLPART, ir.OMETHEXPR:
- if !go117ExportTypes {
- // unreachable - mapped to case OXDOT by exporter
+ case ir.OXDOT, ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH, ir.OMETHVALUE, ir.OMETHEXPR:
+ // For !go117ExportTypes, we should only see OXDOT.
+ // For go117ExportTypes, we usually see all the other ops, but can see
+ // OXDOT for generic functions.
+ if op != ir.OXDOT && !go117ExportTypes {
goto error
}
pos := r.pos()
expr := r.expr()
sel := r.exoticSelector()
n := ir.NewSelectorExpr(pos, op, expr, sel)
- n.SetType(r.exoticType())
- switch op {
- case ir.ODOT, ir.ODOTPTR, ir.ODOTINTER:
- n.Selection = r.exoticField()
- case ir.ODOTMETH, ir.OCALLPART, ir.OMETHEXPR:
- // These require a Lookup to link to the correct declaration.
- rcvrType := expr.Type()
- typ := n.Type()
- n.Selection = Lookdot(n, rcvrType, 1)
- if op == ir.OCALLPART || op == ir.OMETHEXPR {
- // Lookdot clobbers the opcode and type, undo that.
- n.SetOp(op)
- n.SetType(typ)
+ if go117ExportTypes {
+ n.SetType(r.exoticType())
+ switch op {
+ case ir.OXDOT:
+ hasSelection := r.bool()
+ // We reconstruct n.Selection for method calls on
+ // generic types and method calls due to type param
+ // bounds. Otherwise, n.Selection is nil.
+ if hasSelection {
+ n1 := ir.NewSelectorExpr(pos, op, expr, sel)
+ AddImplicitDots(n1)
+ var m *types.Field
+ if n1.X.Type().IsTypeParam() {
+ genType := n1.X.Type().Bound()
+ m = Lookdot1(n1, sel, genType, genType.AllMethods(), 1)
+ } else {
+ genType := types.ReceiverBaseType(n1.X.Type())
+ if genType.IsInstantiatedGeneric() {
+ genType = genType.OrigSym.Def.Type()
+ }
+ m = Lookdot1(n1, sel, genType, genType.Methods(), 1)
+ }
+ assert(m != nil)
+ n.Selection = m
+ }
+ case ir.ODOT, ir.ODOTPTR, ir.ODOTINTER:
+ n.Selection = r.exoticField()
+ case ir.ODOTMETH, ir.OMETHVALUE, ir.OMETHEXPR:
+ // These require a Lookup to link to the correct declaration.
+ rcvrType := expr.Type()
+ typ := n.Type()
+ n.Selection = Lookdot(n, rcvrType, 1)
+ if op == ir.OMETHVALUE || op == ir.OMETHEXPR {
+ // Lookdot clobbers the opcode and type, undo that.
+ n.SetOp(op)
+ n.SetType(typ)
+ }
}
}
return n
@@ -1247,7 +1430,7 @@ func (r *importReader) node() ir.Node {
n := ir.NewIndexExpr(r.pos(), r.expr(), r.expr())
if go117ExportTypes {
n.SetOp(op)
- n.SetType(r.typ())
+ n.SetType(r.exoticType())
if op == ir.OINDEXMAP {
n.Assigned = r.bool()
}
@@ -1267,7 +1450,7 @@ func (r *importReader) node() ir.Node {
}
return n
- case ir.OCONV, ir.OCONVIFACE, ir.OCONVNOP, ir.OBYTES2STR, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2RUNES, ir.ORUNESTR, ir.OSLICE2ARRPTR:
+ case ir.OCONV, ir.OCONVIFACE, ir.OCONVIDATA, ir.OCONVNOP, ir.OBYTES2STR, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2RUNES, ir.ORUNESTR, ir.OSLICE2ARRPTR:
if !go117ExportTypes && op != ir.OCONV {
// unreachable - mapped to OCONV case by exporter
goto error
@@ -1318,7 +1501,6 @@ func (r *importReader) node() ir.Node {
n.IsDDD = r.bool()
if go117ExportTypes {
n.SetType(r.exoticType())
- n.Use = ir.CallUse(r.uint64())
}
return n
@@ -1343,8 +1525,15 @@ func (r *importReader) node() ir.Node {
n.Args.Append(r.exprList()...)
return n
+ case ir.OLINKSYMOFFSET:
+ pos := r.pos()
+ name := r.string()
+ off := r.uint64()
+ typ := r.typ()
+ return ir.NewLinksymOffsetExpr(pos, Lookup(name).Linksym(), int64(off), typ)
+
// unary expressions
- case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.ORECV:
+ case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.ORECV, ir.OIDATA:
n := ir.NewUnaryExpr(r.pos(), op, r.expr())
if go117ExportTypes {
n.SetType(r.typ())
@@ -1368,7 +1557,7 @@ func (r *importReader) node() ir.Node {
// binary expressions
case ir.OADD, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT,
- ir.OLSH, ir.OMOD, ir.OMUL, ir.ONE, ir.OOR, ir.ORSH, ir.OSUB, ir.OXOR:
+ ir.OLSH, ir.OMOD, ir.OMUL, ir.ONE, ir.OOR, ir.ORSH, ir.OSUB, ir.OXOR, ir.OEFACE:
n := ir.NewBinaryExpr(r.pos(), op, r.expr(), r.expr())
if go117ExportTypes {
n.SetType(r.typ())
@@ -1496,6 +1685,26 @@ func (r *importReader) node() ir.Node {
case ir.OEND:
return nil
+ case ir.OFUNCINST:
+ pos := r.pos()
+ x := r.expr()
+ ntargs := r.uint64()
+ var targs []ir.Node
+ if ntargs > 0 {
+ targs = make([]ir.Node, ntargs)
+ for i := range targs {
+ targs[i] = ir.TypeNode(r.typ())
+ }
+ }
+ n := ir.NewInstExpr(pos, ir.OFUNCINST, x, targs)
+ if go117ExportTypes {
+ n.SetType(r.typ())
+ }
+ return n
+
+ case ir.OSELRECV2:
+ return ir.NewAssignListStmt(r.pos(), ir.OSELRECV2, r.exprList(), r.exprList())
+
default:
base.Fatalf("cannot import %v (%d) node\n"+
"\t==> please file an issue and assign to gri@", op, int(op))
@@ -1517,11 +1726,7 @@ func (r *importReader) op() ir.Op {
func (r *importReader) fieldList() []ir.Node {
list := make([]ir.Node, r.uint64())
for i := range list {
- x := ir.NewStructKeyExpr(r.pos(), r.selector(), r.expr())
- if go117ExportTypes {
- x.Offset = int64(r.uint64())
- }
- list[i] = x
+ list[i] = ir.NewStructKeyExpr(r.pos(), r.exoticField(), r.expr())
}
return list
}
@@ -1544,3 +1749,120 @@ func builtinCall(pos src.XPos, op ir.Op) *ir.CallExpr {
}
return ir.NewCallExpr(pos, ir.OCALL, ir.NewIdent(base.Pos, types.BuiltinPkg.Lookup(ir.OpNames[op])), nil)
}
+
+// NewIncompleteNamedType returns a TFORW type t with name specified by sym, such
+// that t.nod and sym.Def are set correctly.
+func NewIncompleteNamedType(pos src.XPos, sym *types.Sym) *types.Type {
+ name := ir.NewDeclNameAt(pos, ir.OTYPE, sym)
+ forw := types.NewNamed(name)
+ name.SetType(forw)
+ sym.Def = name
+ return forw
+}
+
+// Instantiate creates a new named type which is the instantiation of the base
+// named generic type, with the specified type args.
+func Instantiate(pos src.XPos, baseType *types.Type, targs []*types.Type) *types.Type {
+ baseSym := baseType.Sym()
+ if strings.Index(baseSym.Name, "[") >= 0 {
+ base.Fatalf("arg to Instantiate is not a base generic type")
+ }
+ name := InstTypeName(baseSym.Name, targs)
+ instSym := baseSym.Pkg.Lookup(name)
+ if instSym.Def != nil {
+ // May match existing type from previous import or
+ // types2-to-types1 conversion, or from in-progress instantiation
+ // in the current type import stack.
+ return instSym.Def.Type()
+ }
+
+ t := NewIncompleteNamedType(baseType.Pos(), instSym)
+ t.SetRParams(targs)
+ t.OrigSym = baseSym
+
+ // baseType may still be TFORW or its methods may not be fully filled in
+ // (since we are in the middle of importing it). So, delay call to
+ // substInstType until we get back up to the top of the current top-most
+ // type import.
+ deferredInstStack = append(deferredInstStack, t)
+
+ return t
+}
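A minimal usage sketch of Instantiate, assuming listBase is the *types.Type of an already-imported generic type List[T any]; the helper name and the deferral window mirror r.typ above and are not part of the patch:

    // instantiateListOfInt returns the named type "List[int]".
    func instantiateListOfInt(listBase *types.Type) *types.Type {
        types.DeferCheckSize()
        deferDoInst()
        t := Instantiate(listBase.Pos(), listBase, []*types.Type{types.Types[types.TINT]})
        // t is a TFORW stub until the outermost resumeDoInst runs substInstType.
        resumeDoInst()
        types.ResumeCheckSize()
        return t
    }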
+
+var deferredInstStack []*types.Type
+var deferInst int
+
+// deferDoInst defers substitution on instantiated types until we are at the
+// top-most defined type, so the base types are fully defined.
+func deferDoInst() {
+ deferInst++
+}
+
+func resumeDoInst() {
+ if deferInst == 1 {
+ for len(deferredInstStack) > 0 {
+ t := deferredInstStack[0]
+ deferredInstStack = deferredInstStack[1:]
+ substInstType(t, t.OrigSym.Def.(*ir.Name).Type(), t.RParams())
+ }
+ }
+ deferInst--
+}
+
+// doInst creates a new instantiation type (which will be added to
+// deferredInstStack for completion later) for an incomplete type encountered
+// during a type substitution for an instantiation. This is needed for
+// instantiations of mutually recursive types.
+func doInst(t *types.Type) *types.Type {
+ return Instantiate(t.Pos(), t.OrigSym.Def.(*ir.Name).Type(), t.RParams())
+}
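The deferral above matters for mutually recursive generic types; a hypothetical source example (not part of the patch):

    package example

    // While instantiating A[int], substitution reaches B[int] before B's base
    // type is fully constructed; doInst queues the incomplete B[int] on
    // deferredInstStack, and resumeDoInst finishes it via substInstType once
    // the outermost type is done.
    type A[T any] struct{ b *B[T] }
    type B[T any] struct{ a *A[T] }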
+
+// substInstType completes the instantiation of a generic type by doing a
+// substitution on the underlying type itself and any methods. t is the
+// instantiation being created, baseType is the base generic type, and targs are
+// the type arguments that baseType is being instantiated with.
+func substInstType(t *types.Type, baseType *types.Type, targs []*types.Type) {
+ subst := Tsubster{
+ Tparams: baseType.RParams(),
+ Targs: targs,
+ SubstForwFunc: doInst,
+ }
+ t.SetUnderlying(subst.Typ(baseType.Underlying()))
+
+ newfields := make([]*types.Field, baseType.Methods().Len())
+ for i, f := range baseType.Methods().Slice() {
+ if !f.IsMethod() || types.IsInterfaceMethod(f.Type) {
+ // Do a normal substitution if this is a non-method (which
+ // means this must be an interface used as a constraint) or
+ // an interface method.
+ t2 := subst.Typ(f.Type)
+ newfields[i] = types.NewField(f.Pos, f.Sym, t2)
+ continue
+ }
+ recvType := f.Type.Recv().Type
+ if recvType.IsPtr() {
+ recvType = recvType.Elem()
+ }
+ // Substitute in the method using the type params used in the
+ // method (not the type params in the definition of the generic type).
+ msubst := Tsubster{
+ Tparams: recvType.RParams(),
+ Targs: targs,
+ SubstForwFunc: doInst,
+ }
+ t2 := msubst.Typ(f.Type)
+ oldsym := f.Nname.Sym()
+ newsym := MakeFuncInstSym(oldsym, targs, true)
+ var nname *ir.Name
+ if newsym.Def != nil {
+ nname = newsym.Def.(*ir.Name)
+ } else {
+ nname = ir.NewNameAt(f.Pos, newsym)
+ nname.SetType(t2)
+ newsym.Def = nname
+ }
+ newfields[i] = types.NewField(f.Pos, f.Sym, t2)
+ newfields[i].Nname = nname
+ }
+ t.Methods().Set(newfields)
+}
diff --git a/src/cmd/compile/internal/typecheck/stmt.go b/src/cmd/compile/internal/typecheck/stmt.go
index 922a01bfbe..c322d490e5 100644
--- a/src/cmd/compile/internal/typecheck/stmt.go
+++ b/src/cmd/compile/internal/typecheck/stmt.go
@@ -172,6 +172,10 @@ assignOK:
r := r.(*ir.TypeAssertExpr)
stmt.SetOp(ir.OAS2DOTTYPE)
r.SetOp(ir.ODOTTYPE2)
+ case ir.ODYNAMICDOTTYPE:
+ r := r.(*ir.DynamicTypeAssertExpr)
+ stmt.SetOp(ir.OAS2DOTTYPE)
+ r.SetOp(ir.ODYNAMICDOTTYPE2)
default:
break assignOK
}
@@ -201,7 +205,6 @@ assignOK:
stmt := stmt.(*ir.AssignListStmt)
stmt.SetOp(ir.OAS2FUNC)
r := rhs[0].(*ir.CallExpr)
- r.Use = ir.CallUseList
rtyp := r.Type()
mismatched := false
@@ -217,7 +220,7 @@ assignOK:
}
}
if mismatched && !failed {
- rewriteMultiValueCall(stmt, r)
+ RewriteMultiValueCall(stmt, r)
}
return
}
@@ -237,6 +240,15 @@ func plural(n int) string {
return "s"
}
+// tcCheckNil typechecks an OCHECKNIL node.
+func tcCheckNil(n *ir.UnaryExpr) ir.Node {
+ n.X = Expr(n.X)
+ if !n.X.Type().IsPtrShaped() {
+ base.FatalfAt(n.Pos(), "%L is not pointer shaped", n.X)
+ }
+ return n
+}
+
// tcFor typechecks an OFOR node.
func tcFor(n *ir.ForStmt) ir.Node {
Stmts(n.Init())
@@ -653,29 +665,18 @@ func tcSwitchType(n *ir.SwitchStmt) {
}
type typeSet struct {
- m map[string][]typeSetEntry
-}
-
-type typeSetEntry struct {
- pos src.XPos
- typ *types.Type
+ m map[string]src.XPos
}
func (s *typeSet) add(pos src.XPos, typ *types.Type) {
if s.m == nil {
- s.m = make(map[string][]typeSetEntry)
+ s.m = make(map[string]src.XPos)
}
- // LongString does not uniquely identify types, so we need to
- // disambiguate collisions with types.Identical.
- // TODO(mdempsky): Add a method that *is* unique.
- ls := typ.LongString()
- prevs := s.m[ls]
- for _, prev := range prevs {
- if types.Identical(typ, prev.typ) {
- base.ErrorfAt(pos, "duplicate case %v in type switch\n\tprevious case at %s", typ, base.FmtPos(prev.pos))
- return
- }
+ ls := typ.LinkString()
+ if prev, ok := s.m[ls]; ok {
+ base.ErrorfAt(pos, "duplicate case %v in type switch\n\tprevious case at %s", typ, base.FmtPos(prev))
+ return
}
- s.m[ls] = append(prevs, typeSetEntry{pos, typ})
+ s.m[ls] = pos
}
diff --git a/src/cmd/compile/internal/typecheck/subr.go b/src/cmd/compile/internal/typecheck/subr.go
index 9ee7a94b1f..7ae10ef406 100644
--- a/src/cmd/compile/internal/typecheck/subr.go
+++ b/src/cmd/compile/internal/typecheck/subr.go
@@ -5,6 +5,7 @@
package typecheck
import (
+ "bytes"
"fmt"
"sort"
"strconv"
@@ -352,9 +353,10 @@ func Assignop(src, dst *types.Type) (ir.Op, string) {
return ir.OCONVNOP, ""
}
- // 2. src and dst have identical underlying types
- // and either src or dst is not a named type or
- // both are empty interface types.
+ // 2. src and dst have identical underlying types and
+ // a. either src or dst is not a named type, or
+ // b. both are empty interface types, or
+ // c. at least one is a gcshape type.
// For assignable but different non-empty interface types,
// we want to recompute the itab. Recomputing the itab ensures
// that itabs are unique (thus an interface with a compile-time
@@ -371,21 +373,24 @@ func Assignop(src, dst *types.Type) (ir.Op, string) {
// which need to have their itab updated.
return ir.OCONVNOP, ""
}
+ if src.IsShape() || dst.IsShape() {
+ // Conversion between a shape type and one of the types
+ // it represents also needs no conversion.
+ return ir.OCONVNOP, ""
+ }
}
// 3. dst is an interface type and src implements dst.
if dst.IsInterface() && src.Kind() != types.TNIL {
var missing, have *types.Field
var ptr int
+ if src.IsShape() {
+ // Shape types implement things they have already
+ // been typechecked to implement, even if they
+ // don't have the methods for them.
+ return ir.OCONVIFACE, ""
+ }
if implements(src, dst, &missing, &have, &ptr) {
- // Call NeedITab/ITabAddr so that (src, dst)
- // gets added to itabs early, which allows
- // us to de-virtualize calls through this
- // type/interface pair later. See CompileITabs in reflect.go
- if types.IsDirectIface(src) && !dst.IsEmptyInterface() {
- NeedITab(src, dst)
- }
-
return ir.OCONVIFACE, ""
}
@@ -722,13 +727,23 @@ func ifacelookdot(s *types.Sym, t *types.Type, ignorecase bool) (m *types.Field,
return m, followptr
}
+// implements reports whether t implements the interface iface. t can be
+// an interface, a type parameter, or a concrete type. If implements returns
+// false, it stores a method of iface that is not implemented in *m. If the
+// method name matches but the type is wrong, it additionally stores the type
+// of the method (on t) in *samename.
func implements(t, iface *types.Type, m, samename **types.Field, ptr *int) bool {
t0 := t
if t == nil {
return false
}
- if t.IsInterface() {
+ if t.IsInterface() || t.IsTypeParam() {
+ if t.IsTypeParam() {
+ // A typeparam satisfies an interface if its type bound
+ // has all the methods of that interface.
+ t = t.Bound()
+ }
i := 0
tms := t.AllMethods().Slice()
for _, im := range iface.AllMethods().Slice() {
@@ -874,3 +889,504 @@ var slist []symlink
type symlink struct {
field *types.Field
}
+
+// TypesOf converts a list of nodes to a list
+// of types of those nodes.
+func TypesOf(x []ir.Node) []*types.Type {
+ r := make([]*types.Type, len(x))
+ for i, n := range x {
+ r[i] = n.Type()
+ }
+ return r
+}
+
+// addTargs writes out the targs to buffer b as a comma-separated list enclosed by
+// brackets.
+func addTargs(b *bytes.Buffer, targs []*types.Type) {
+ b.WriteByte('[')
+ for i, targ := range targs {
+ if i > 0 {
+ b.WriteByte(',')
+ }
+ // Use NameString(), which includes the package name for the local
+ // package, to make sure that type arguments (including type params)
+ // are uniquely specified.
+ tstring := targ.NameString()
+ // types1 uses "interface {" and types2 uses "interface{" - convert
+ // to consistent types2 format. Same for "struct {"
+ tstring = strings.Replace(tstring, "interface {", "interface{", -1)
+ tstring = strings.Replace(tstring, "struct {", "struct{", -1)
+ b.WriteString(tstring)
+ }
+ b.WriteString("]")
+}
+
+// InstTypeName creates a name for an instantiated type, based on the name of the
+// generic type and the type args.
+func InstTypeName(name string, targs []*types.Type) string {
+ b := bytes.NewBufferString(name)
+ addTargs(b, targs)
+ return b.String()
+}
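For example (an illustrative sketch; the exact strings assume NameString yields "int" and "string" for the predeclared types):

    name := InstTypeName("Pair", []*types.Type{
        types.Types[types.TINT],
        types.Types[types.TSTRING],
    })
    // name == "Pair[int,string]"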
+
+// makeInstName1 returns the name of the generic function instantiated with the
+// given types, which can have type params or shapes, or be concrete types. name is
+// the name of the generic function or method.
+func makeInstName1(name string, targs []*types.Type, hasBrackets bool) string {
+ b := bytes.NewBufferString("")
+ i := strings.Index(name, "[")
+ assert(hasBrackets == (i >= 0))
+ if i >= 0 {
+ b.WriteString(name[0:i])
+ } else {
+ b.WriteString(name)
+ }
+ addTargs(b, targs)
+ if i >= 0 {
+ i2 := strings.LastIndex(name[i:], "]")
+ assert(i2 >= 0)
+ b.WriteString(name[i+i2+1:])
+ }
+ return b.String()
+}
+
+// MakeFuncInstSym makes the unique sym for a stenciled generic function or method,
+// based on the name of the generic function or method gf and the targs. It
+// replaces any existing bracket type list in the name. MakeFuncInstSym asserts
+// that gf has brackets in its name if and only if hasBrackets is true.
+//
+// Names of declared generic functions have no brackets originally, so hasBrackets
+// should be false. Names of generic methods already have brackets, since the new
+// type parameter is specified in the generic type of the receiver (e.g. func
+// (func (v *value[T]).set(...) { ... } has the original name (*value[T]).set.
+//
+// The standard naming is something like: 'genFn[int,bool]' for functions and
+// '(*genType[int,bool]).methodName' for methods.
+func MakeFuncInstSym(gf *types.Sym, targs []*types.Type, hasBrackets bool) *types.Sym {
+ return gf.Pkg.Lookup(makeInstName1(gf.Name, targs, hasBrackets))
+}
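A hedged sketch of the resulting syms; minSym and the concrete names are assumptions that follow the convention described above:

    // minSym is the *types.Sym of a declared generic function
    // func Min[T any](a, b T) T, whose name has no brackets yet.
    sym := MakeFuncInstSym(minSym, []*types.Type{types.Types[types.TINT]}, false)
    // sym.Name == "Min[int]"

    // A generic method's name already has brackets, e.g. "(*value[T]).set";
    // with hasBrackets=true the existing bracket list is replaced, giving
    // "(*value[int]).set".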
+
+func MakeDictSym(gf *types.Sym, targs []*types.Type, hasBrackets bool) *types.Sym {
+ for _, targ := range targs {
+ if targ.HasTParam() {
+ fmt.Printf("FUNCTION %s\n", gf.Name)
+ for _, targ := range targs {
+ fmt.Printf(" PARAM %+v\n", targ)
+ }
+ panic("dictionary should always have concrete type args")
+ }
+ }
+ name := makeInstName1(gf.Name, targs, hasBrackets)
+ name = ".dict." + name
+ return gf.Pkg.Lookup(name)
+}
+
+func assert(p bool) {
+ base.Assert(p)
+}
+
+// General type substituter, for replacing typeparams with type args.
+type Tsubster struct {
+ Tparams []*types.Type
+ Targs []*types.Type
+ // If non-nil, the substitution map from name nodes in the generic function to the
+ // name nodes in the new stenciled function.
+ Vars map[*ir.Name]*ir.Name
+ // New fully-instantiated generic types whose methods should be instantiated.
+ InstTypeList []*types.Type
+ // If non-nil, function to substitute an incomplete (TFORW) type.
+ SubstForwFunc func(*types.Type) *types.Type
+}
+
+// Typ computes the type obtained by substituting any type parameter in t with the
+// corresponding type argument in subst. If t contains no type parameters, the
+// result is t; otherwise the result is a new type. It deals with recursive types
+// by using TFORW types and finding partially or fully created types via sym.Def.
+func (ts *Tsubster) Typ(t *types.Type) *types.Type {
+ if !t.HasTParam() && t.Kind() != types.TFUNC {
+ // Note: function types need to be copied regardless, as the
+ // types of closures may contain declarations that need
+ // to be copied. See #45738.
+ return t
+ }
+
+ if t.IsTypeParam() {
+ for i, tp := range ts.Tparams {
+ if tp == t {
+ return ts.Targs[i]
+ }
+ }
+ // If t is a simple typeparam T, then t has the name/symbol 'T'
+ // and t.Underlying() == t.
+ //
+ // However, consider the type definition: 'type P[T any] T'. We
+ // might use this definition so we can have a variant of type T
+ // that we can add new methods to. Suppose t is a reference to
+ // P[T]. t has the name 'P[T]', but its kind is TTYPEPARAM,
+ // because P[T] is defined as T. If we look at t.Underlying(), it
+ // is different, because the name of t.Underlying() is 'T' rather
+ // than 'P[T]'. But the kind of t.Underlying() is also TTYPEPARAM.
+ // In this case, we do the needed recursive substitution in the
+ // case statement below.
+ if t.Underlying() == t {
+ // t is a simple typeparam that didn't match anything in tparam
+ return t
+ }
+ // t is a more complex typeparam (e.g. P[T], as above, whose
+ // definition is just T).
+ assert(t.Sym() != nil)
+ }
+
+ var newsym *types.Sym
+ var neededTargs []*types.Type
+ var targsChanged bool
+ var forw *types.Type
+
+ if t.Sym() != nil {
+ // Translate the type params for this type according to
+ // the tparam/targs mapping from subst.
+ neededTargs = make([]*types.Type, len(t.RParams()))
+ for i, rparam := range t.RParams() {
+ neededTargs[i] = ts.Typ(rparam)
+ if !types.Identical(neededTargs[i], rparam) {
+ targsChanged = true
+ }
+ }
+ // For a named (defined) type, we have to change the name of the
+ // type as well. We do this first, so we can look up if we've
+ // already seen this type during this substitution or other
+ // definitions/substitutions.
+ genName := genericTypeName(t.Sym())
+ newsym = t.Sym().Pkg.Lookup(InstTypeName(genName, neededTargs))
+ if newsym.Def != nil {
+ // We've already created this instantiated defined type.
+ return newsym.Def.Type()
+ }
+
+ // In order to deal with recursive generic types, create a TFORW
+ // type initially and set the Def field of its sym, so it can be
+ // found if this type appears recursively within the type.
+ forw = NewIncompleteNamedType(t.Pos(), newsym)
+ //println("Creating new type by sub", newsym.Name, forw.HasTParam())
+ forw.SetRParams(neededTargs)
+ // Copy the OrigSym from the re-instantiated type (which is the sym of
+ // the base generic type).
+ assert(t.OrigSym != nil)
+ forw.OrigSym = t.OrigSym
+ }
+
+ var newt *types.Type
+
+ switch t.Kind() {
+ case types.TTYPEPARAM:
+ if t.Sym() == newsym && !targsChanged {
+ // The substitution did not change the type.
+ return t
+ }
+ // Substitute the underlying typeparam (e.g. T in P[T], see
+ // the example describing type P[T] above).
+ newt = ts.Typ(t.Underlying())
+ assert(newt != t)
+
+ case types.TARRAY:
+ elem := t.Elem()
+ newelem := ts.Typ(elem)
+ if newelem != elem || targsChanged {
+ newt = types.NewArray(newelem, t.NumElem())
+ }
+
+ case types.TPTR:
+ elem := t.Elem()
+ newelem := ts.Typ(elem)
+ if newelem != elem || targsChanged {
+ newt = types.NewPtr(newelem)
+ }
+
+ case types.TSLICE:
+ elem := t.Elem()
+ newelem := ts.Typ(elem)
+ if newelem != elem || targsChanged {
+ newt = types.NewSlice(newelem)
+ }
+
+ case types.TSTRUCT:
+ newt = ts.tstruct(t, targsChanged)
+ if newt == t {
+ newt = nil
+ }
+
+ case types.TFUNC:
+ newrecvs := ts.tstruct(t.Recvs(), false)
+ newparams := ts.tstruct(t.Params(), false)
+ newresults := ts.tstruct(t.Results(), false)
+ // Translate the tparams of a signature.
+ newtparams := ts.tstruct(t.TParams(), false)
+ if newrecvs != t.Recvs() || newparams != t.Params() ||
+ newresults != t.Results() || newtparams != t.TParams() || targsChanged {
+ // If any types have changed, then all the fields of
+ // recv, params, and results must be copied, because they have
+ // offset fields that are dependent, and so must have an
+ // independent copy for each new signature.
+ var newrecv *types.Field
+ if newrecvs.NumFields() > 0 {
+ if newrecvs == t.Recvs() {
+ newrecvs = ts.tstruct(t.Recvs(), true)
+ }
+ newrecv = newrecvs.Field(0)
+ }
+ if newparams == t.Params() {
+ newparams = ts.tstruct(t.Params(), true)
+ }
+ if newresults == t.Results() {
+ newresults = ts.tstruct(t.Results(), true)
+ }
+ var tparamfields []*types.Field
+ if newtparams.HasTParam() {
+ tparamfields = newtparams.FieldSlice()
+ } else {
+ // Completely remove the tparams from the resulting
+ // signature, if the tparams are now concrete types.
+ tparamfields = nil
+ }
+ newt = types.NewSignature(t.Pkg(), newrecv, tparamfields,
+ newparams.FieldSlice(), newresults.FieldSlice())
+ }
+
+ case types.TINTER:
+ newt = ts.tinter(t)
+ if newt == t && !targsChanged {
+ newt = nil
+ }
+
+ case types.TMAP:
+ newkey := ts.Typ(t.Key())
+ newval := ts.Typ(t.Elem())
+ if newkey != t.Key() || newval != t.Elem() || targsChanged {
+ newt = types.NewMap(newkey, newval)
+ }
+
+ case types.TCHAN:
+ elem := t.Elem()
+ newelem := ts.Typ(elem)
+ if newelem != elem || targsChanged {
+ newt = types.NewChan(newelem, t.ChanDir())
+ if !newt.HasTParam() {
+ // TODO(danscales): not sure why I have to do this
+ // only for channels.....
+ types.CheckSize(newt)
+ }
+ }
+ case types.TFORW:
+ if ts.SubstForwFunc != nil {
+ newt = ts.SubstForwFunc(t)
+ } else {
+ assert(false)
+ }
+ case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64,
+ types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64,
+ types.TUINTPTR, types.TBOOL, types.TSTRING, types.TFLOAT32, types.TFLOAT64, types.TCOMPLEX64, types.TCOMPLEX128:
+ newt = t.Underlying()
+ case types.TUNION:
+ nt := t.NumTerms()
+ newterms := make([]*types.Type, nt)
+ tildes := make([]bool, nt)
+ changed := false
+ for i := 0; i < nt; i++ {
+ term, tilde := t.Term(i)
+ tildes[i] = tilde
+ newterms[i] = ts.Typ(term)
+ if newterms[i] != term {
+ changed = true
+ }
+ }
+ if changed {
+ newt = types.NewUnion(newterms, tildes)
+ }
+ default:
+ panic(fmt.Sprintf("Bad type in (*TSubster).Typ: %v", t.Kind()))
+ }
+ if newt == nil {
+ // Even though there were typeparams in the type, there may be no
+ // change if this is a function type for a function call (which will
+ // have its own tparams/targs in the function instantiation).
+ return t
+ }
+
+ if t.Sym() == nil && t.Kind() != types.TINTER {
+ // Not a named type or interface type, so there was no forwarding type
+ // and there are no methods to substitute.
+ assert(t.Methods().Len() == 0)
+ return newt
+ }
+
+ if forw != nil {
+ forw.SetUnderlying(newt)
+ newt = forw
+ }
+
+ if t.Kind() != types.TINTER && t.Methods().Len() > 0 {
+ // Fill in the method info for the new type.
+ var newfields []*types.Field
+ newfields = make([]*types.Field, t.Methods().Len())
+ for i, f := range t.Methods().Slice() {
+ t2 := ts.Typ(f.Type)
+ oldsym := f.Nname.Sym()
+ newsym := MakeFuncInstSym(oldsym, ts.Targs, true)
+ var nname *ir.Name
+ if newsym.Def != nil {
+ nname = newsym.Def.(*ir.Name)
+ } else {
+ nname = ir.NewNameAt(f.Pos, newsym)
+ nname.SetType(t2)
+ newsym.Def = nname
+ }
+ newfields[i] = types.NewField(f.Pos, f.Sym, t2)
+ newfields[i].Nname = nname
+ }
+ newt.Methods().Set(newfields)
+ if !newt.HasTParam() && !newt.HasShape() {
+ // Generate all the methods for a new fully-instantiated type.
+ ts.InstTypeList = append(ts.InstTypeList, newt)
+ }
+ }
+ return newt
+}
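A minimal usage sketch of the substituter (not part of the patch); tparamT and sliceOfT are assumed to be in hand, holding the *types.Type of a type parameter T and of []T respectively:

    ts := Tsubster{
        Tparams: []*types.Type{tparamT},                 // T from the generic declaration
        Targs:   []*types.Type{types.Types[types.TINT]}, // substitute T -> int
    }
    intSlice := ts.Typ(sliceOfT) // []T becomes []int; a type with no tparams comes back unchanged
    _ = intSlice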
+
+// tstruct substitutes type params in types of the fields of a structure type. For
+// each field, tstruct copies the Nname, and translates it if Nname is in
+// ts.vars. To always force the creation of a new (top-level) struct,
+// regardless of whether anything changed with the types or names of the struct's
+// fields, set force to true.
+func (ts *Tsubster) tstruct(t *types.Type, force bool) *types.Type {
+ if t.NumFields() == 0 {
+ if t.HasTParam() {
+ // For an empty struct, we need to return a new type,
+ // since it may now be fully instantiated (HasTParam
+ // becomes false).
+ return types.NewStruct(t.Pkg(), nil)
+ }
+ return t
+ }
+ var newfields []*types.Field
+ if force {
+ newfields = make([]*types.Field, t.NumFields())
+ }
+ for i, f := range t.Fields().Slice() {
+ t2 := ts.Typ(f.Type)
+ if (t2 != f.Type || f.Nname != nil) && newfields == nil {
+ newfields = make([]*types.Field, t.NumFields())
+ for j := 0; j < i; j++ {
+ newfields[j] = t.Field(j)
+ }
+ }
+ if newfields != nil {
+ // TODO(danscales): make sure this works for the field
+ // names of embedded types (which should keep the name of
+ // the type param, not the instantiated type).
+ newfields[i] = types.NewField(f.Pos, f.Sym, t2)
+ newfields[i].Embedded = f.Embedded
+ if f.IsDDD() {
+ newfields[i].SetIsDDD(true)
+ }
+ if f.Nointerface() {
+ newfields[i].SetNointerface(true)
+ }
+ if f.Nname != nil && ts.Vars != nil {
+ v := ts.Vars[f.Nname.(*ir.Name)]
+ if v != nil {
+ // This is the case where we are
+ // translating the type of the function we
+ // are substituting, so its dcls are in
+ // the subst.ts.vars table, and we want to
+ // change to reference the new dcl.
+ newfields[i].Nname = v
+ } else {
+ // This is the case where we are
+ // translating the type of a function
+ // reference inside the function we are
+ // substituting, so we leave the Nname
+ // value as is.
+ newfields[i].Nname = f.Nname
+ }
+ }
+ }
+ }
+ if newfields != nil {
+ return types.NewStruct(t.Pkg(), newfields)
+ }
+ return t
+
+}
+
+// tinter substitutes type params in types of the methods of an interface type.
+func (ts *Tsubster) tinter(t *types.Type) *types.Type {
+ if t.Methods().Len() == 0 {
+ return t
+ }
+ var newfields []*types.Field
+ for i, f := range t.Methods().Slice() {
+ t2 := ts.Typ(f.Type)
+ if (t2 != f.Type || f.Nname != nil) && newfields == nil {
+ newfields = make([]*types.Field, t.Methods().Len())
+ for j := 0; j < i; j++ {
+ newfields[j] = t.Methods().Index(j)
+ }
+ }
+ if newfields != nil {
+ newfields[i] = types.NewField(f.Pos, f.Sym, t2)
+ }
+ }
+ if newfields != nil {
+ return types.NewInterface(t.Pkg(), newfields)
+ }
+ return t
+}
+
+// genericTypeName returns the name of the base generic type for the type named by
+// sym. It simply returns the name obtained by removing everything after the
+// first bracket ("[").
+func genericTypeName(sym *types.Sym) string {
+ return sym.Name[0:strings.Index(sym.Name, "[")]
+}
+
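For intuition, a minimal standalone sketch of the same trimming idea that genericTypeName implements (illustrative only, not part of this patch; the instantiated names are made up):

package main

import (
	"fmt"
	"strings"
)

// baseGenericName keeps everything before the first "[" of an instantiated
// type name, mirroring genericTypeName above; unlike the compiler helper it
// also tolerates names without brackets.
func baseGenericName(name string) string {
	if i := strings.Index(name, "["); i >= 0 {
		return name[:i]
	}
	return name
}

func main() {
	fmt.Println(baseGenericName("Value[int]"))       // Value
	fmt.Println(baseGenericName("Pair[int,string]")) // Pair
}
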
+// Shapify takes a concrete type and returns a GCshape type that can
+// be used in place of the input type and still generate identical code.
+// No methods are added - all method calls directly on a shape should
+// be done by converting to an interface using the dictionary.
+//
+// TODO: this could take the generic function and base its decisions
+// on how that generic function uses this type argument. For instance,
+// if it doesn't use it as a function argument/return value, then
+// we don't need to distinguish int64 and float64 (because they only
+// differ in how they get passed as arguments). For now, we only
+// unify two different types if they are identical in every possible way.
+func Shapify(t *types.Type) *types.Type {
+ assert(!t.HasShape())
+ // Map all types with the same underlying type to the same shape.
+ u := t.Underlying()
+
+ // All pointers have the same shape.
+ // TODO: Make unsafe.Pointer the same shape as normal pointers.
+ if u.Kind() == types.TPTR {
+ u = types.Types[types.TUINT8].PtrTo()
+ }
+
+ if s := shaped[u]; s != nil {
+ return s
+ }
+
+ sym := shapePkg.Lookup(u.LinkString())
+ name := ir.NewDeclNameAt(u.Pos(), ir.OTYPE, sym)
+ s := types.NewNamed(name)
+ s.SetUnderlying(u)
+ s.SetIsShape(true)
+ s.SetHasShape(true)
+ name.SetType(s)
+ name.SetTypecheck(1)
+ shaped[u] = s
+ return s
+}
+
+var shaped = map[*types.Type]*types.Type{}
+
+var shapePkg = types.NewPkg(".shape", ".shape")
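
As an aside, a minimal standalone sketch of the shape-canonicalization idea behind Shapify (illustrative only, not part of this patch; the string keys stand in for the compiler's use of the underlying *types.Type as the cache key): distinct types with the same underlying representation share one shape, and all pointers collapse to a single shape.

package main

import "fmt"

var shapeCache = map[string]string{}

// shapeKey stands in for "use the underlying type as the cache key";
// all pointer types collapse to one key, as in Shapify above.
func shapeKey(typ string) string {
	if len(typ) > 0 && typ[0] == '*' {
		return "*uint8"
	}
	return typ
}

// shapeOf returns one canonical shape name per key, caching the result.
func shapeOf(typ string) string {
	k := shapeKey(typ)
	if s, ok := shapeCache[k]; ok {
		return s
	}
	s := ".shape." + k
	shapeCache[k] = s
	return s
}

func main() {
	fmt.Println(shapeOf("*int") == shapeOf("*string")) // true: all pointers share a shape
	fmt.Println(shapeOf("int") == shapeOf("float64"))  // false: different underlying types
}
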
diff --git a/src/cmd/compile/internal/typecheck/syms.go b/src/cmd/compile/internal/typecheck/syms.go
index f29af82db2..ed3aaecc5a 100644
--- a/src/cmd/compile/internal/typecheck/syms.go
+++ b/src/cmd/compile/internal/typecheck/syms.go
@@ -75,9 +75,9 @@ func InitRuntime() {
typ := typs[d.typ]
switch d.tag {
case funcTag:
- importfunc(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
+ importfunc(src.NoXPos, sym, typ)
case varTag:
- importvar(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
+ importvar(src.NoXPos, sym, typ)
default:
base.Fatalf("unhandled declaration tag %v", d.tag)
}
diff --git a/src/cmd/compile/internal/typecheck/typecheck.go b/src/cmd/compile/internal/typecheck/typecheck.go
index 359f662369..db1b11c4cf 100644
--- a/src/cmd/compile/internal/typecheck/typecheck.go
+++ b/src/cmd/compile/internal/typecheck/typecheck.go
@@ -13,6 +13,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
+ "cmd/internal/src"
)
// Function collecting autotmps generated during typechecking,
@@ -24,7 +25,6 @@ var inimport bool // set during import
var TypecheckAllowed bool
var (
- NeedITab = func(t, itype *types.Type) {}
NeedRuntimeType = func(*types.Type) {}
)
@@ -35,18 +35,10 @@ func Stmt(n ir.Node) ir.Node { return typecheck(n, ctxStmt) }
func Exprs(exprs []ir.Node) { typecheckslice(exprs, ctxExpr) }
func Stmts(stmts []ir.Node) { typecheckslice(stmts, ctxStmt) }
-func Call(call *ir.CallExpr) {
- t := call.X.Type()
- if t == nil {
- panic("misuse of Call")
- }
- ctx := ctxStmt
- if t.NumResults() > 0 {
- ctx = ctxExpr | ctxMultiOK
- }
- if typecheck(call, ctx) != call {
- panic("bad typecheck")
- }
+func Call(pos src.XPos, callee ir.Node, args []ir.Node, dots bool) ir.Node {
+ call := ir.NewCallExpr(pos, ir.OCALL, callee, args)
+ call.IsDDD = dots
+ return typecheck(call, ctxStmt|ctxExpr)
}
func Callee(n ir.Node) ir.Node {
@@ -59,8 +51,8 @@ func FuncBody(n *ir.Func) {
Stmts(n.Body)
CheckUnused(n)
CheckReturn(n)
- if base.Errors() > errorsBefore {
- n.Body = nil // type errors; do not compile
+ if ir.IsBlank(n.Nname) || base.Errors() > errorsBefore {
+ n.Body = nil // blank function or type errors; do not compile
}
}
@@ -777,6 +769,10 @@ func typecheck1(n ir.Node, top int) ir.Node {
n := n.(*ir.CallExpr)
return tcRecover(n)
+ case ir.ORECOVERFP:
+ n := n.(*ir.CallExpr)
+ return tcRecoverFP(n)
+
case ir.OUNSAFEADD:
n := n.(*ir.BinaryExpr)
return tcUnsafeAdd(n)
@@ -787,11 +783,7 @@ func typecheck1(n ir.Node, top int) ir.Node {
case ir.OCLOSURE:
n := n.(*ir.ClosureExpr)
- tcClosure(n, top)
- if n.Type() == nil {
- return n
- }
- return n
+ return tcClosure(n, top)
case ir.OITAB:
n := n.(*ir.UnaryExpr)
@@ -814,6 +806,14 @@ func typecheck1(n ir.Node, top int) ir.Node {
n.SetType(types.Types[types.TUINTPTR])
return n
+ case ir.OGETCALLERPC, ir.OGETCALLERSP:
+ n := n.(*ir.CallExpr)
+ if len(n.Args) != 0 {
+ base.FatalfAt(n.Pos(), "unexpected arguments: %v", n)
+ }
+ n.SetType(types.Types[types.TUINTPTR])
+ return n
+
case ir.OCONVNOP:
n := n.(*ir.ConvExpr)
n.X = Expr(n.X)
@@ -881,6 +881,10 @@ func typecheck1(n ir.Node, top int) ir.Node {
n := n.(*ir.TailCallStmt)
return n
+ case ir.OCHECKNIL:
+ n := n.(*ir.UnaryExpr)
+ return tcCheckNil(n)
+
case ir.OSELECT:
tcSelect(n.(*ir.SelectStmt))
return n
@@ -951,12 +955,12 @@ func typecheckargs(n ir.InitNode) {
}
// Rewrite f(g()) into t1, t2, ... = g(); f(t1, t2, ...).
- rewriteMultiValueCall(n, list[0])
+ RewriteMultiValueCall(n, list[0])
}
-// rewriteMultiValueCall rewrites multi-valued f() to use temporaries,
+// RewriteMultiValueCall rewrites multi-valued f() to use temporaries,
// so the backend wouldn't need to worry about tuple-valued expressions.
-func rewriteMultiValueCall(n ir.InitNode, call ir.Node) {
+func RewriteMultiValueCall(n ir.InitNode, call ir.Node) {
// If we're outside of function context, then this call will
// be executed during the generated init function. However,
// init.go hasn't yet created it. Instead, associate the
@@ -1913,11 +1917,6 @@ func typecheckdef(n *ir.Name) {
n.SetDiag(true)
goto ret
}
- // For package-level type aliases, set n.Sym.Def so we can identify
- // it as a type alias during export. See also #31959.
- if n.Curfn == nil {
- n.Sym().Def = n.Ntype
- }
}
break
}
diff --git a/src/cmd/compile/internal/typecheck/universe.go b/src/cmd/compile/internal/typecheck/universe.go
index de185ab944..54f3c89c24 100644
--- a/src/cmd/compile/internal/typecheck/universe.go
+++ b/src/cmd/compile/internal/typecheck/universe.go
@@ -158,6 +158,15 @@ func InitUniverse() {
s.Def = n
types.CalcSize(types.ErrorType)
+ // comparable type (interface)
+ s = types.BuiltinPkg.Lookup("comparable")
+ n = ir.NewDeclNameAt(src.NoXPos, ir.OTYPE, s)
+ types.ComparableType = types.NewNamed(n)
+ types.ComparableType.SetUnderlying(makeComparableInterface())
+ n.SetType(types.ComparableType)
+ s.Def = n
+ types.CalcSize(types.ComparableType)
+
types.Types[types.TUNSAFEPTR] = defBasic(types.TUNSAFEPTR, ir.Pkgs.Unsafe, "Pointer")
// simple aliases
@@ -338,6 +347,12 @@ func makeErrorInterface() *types.Type {
return types.NewInterface(types.NoPkg, []*types.Field{method})
}
+func makeComparableInterface() *types.Type {
+ sig := types.NewSignature(types.NoPkg, fakeRecvField(), nil, nil, nil)
+ method := types.NewField(src.NoXPos, Lookup("=="), sig)
+ return types.NewInterface(types.NoPkg, []*types.Field{method})
+}
+
// DeclareUniverse makes the universe block visible within the current package.
func DeclareUniverse() {
// Operationally, this is similar to a dot import of builtinpkg, except
diff --git a/src/cmd/compile/internal/types/fmt.go b/src/cmd/compile/internal/types/fmt.go
index 8b988952a7..0824f6d093 100644
--- a/src/cmd/compile/internal/types/fmt.go
+++ b/src/cmd/compile/internal/types/fmt.go
@@ -239,17 +239,37 @@ func (t *Type) String() string {
return tconv(t, 0, fmtGo)
}
-// ShortString generates a short description of t.
-// It is used in autogenerated method names, reflection,
-// and itab names.
-func (t *Type) ShortString() string {
+// LinkString returns an unexpanded string description of t, suitable
+// for use in link symbols. "Unexpanded" here means that the
+// description uses `"".` to qualify identifiers from the current
+// package, and "expansion" refers to the renaming step performed by
+// the linker to replace these qualifiers with proper `path/to/pkg.`
+// qualifiers.
+//
+// After expansion, the description corresponds to type identity. That
+// is, for any pair of types t1 and t2, Identical(t1, t2) and
+// expand(t1.LinkString()) == expand(t2.LinkString()) report the same
+// value.
+//
+// Within a single compilation unit, LinkString always returns the
+// same unexpanded description for identical types. Thus it's safe to
+// use as a map key to implement a type-identity-keyed map. However,
+// make sure all LinkString calls used for this purpose happen within
+// the same compile process; the string keys are not stable across
+// multiple processes.
+func (t *Type) LinkString() string {
return tconv(t, 0, fmtTypeID)
}
-// LongString generates a complete description of t.
-// It is useful for reflection,
-// or when a unique fingerprint or hash of a type is required.
-func (t *Type) LongString() string {
+// NameString generates a user-readable, mostly unique string
+// description of t. NameString always returns the same description
+// for identical types, even across compilation units.
+//
+// NameString qualifies identifiers by package name, so it has
+// collisions when different packages share the same names and
+// identifiers. It also does not distinguish function-scope defined
+// types from package-scoped defined types or from each other.
+func (t *Type) NameString() string {
return tconv(t, 0, fmtTypeIDName)
}
@@ -316,31 +336,34 @@ func tconv2(b *bytes.Buffer, t *Type, verb rune, mode fmtMode, visited map[*Type
// Unless the 'L' flag was specified, if the type has a name, just print that name.
if verb != 'L' && t.Sym() != nil && t != Types[t.Kind()] {
- switch mode {
- case fmtTypeID, fmtTypeIDName:
- if verb == 'S' {
- if t.Vargen != 0 {
- sconv2(b, t.Sym(), 'S', mode)
- fmt.Fprintf(b, "·%d", t.Vargen)
- return
- }
- sconv2(b, t.Sym(), 'S', mode)
- return
- }
+ // Default to 'v' if verb is invalid.
+ if verb != 'S' {
+ verb = 'v'
+ }
- if mode == fmtTypeIDName {
- sconv2(b, t.Sym(), 'v', fmtTypeIDName)
- return
+ // In unified IR, function-scope defined types will have a ·N
+ // suffix embedded directly in their Name. Trim this off for
+ // non-fmtTypeID modes.
+ sym := t.Sym()
+ if mode != fmtTypeID {
+ i := len(sym.Name)
+ for i > 0 && sym.Name[i-1] >= '0' && sym.Name[i-1] <= '9' {
+ i--
}
-
- if t.Sym().Pkg == LocalPkg && t.Vargen != 0 {
- sconv2(b, t.Sym(), 'v', mode)
- fmt.Fprintf(b, "·%d", t.Vargen)
- return
+ const dot = "·"
+ if i >= len(dot) && sym.Name[i-len(dot):i] == dot {
+ sym = &Sym{Pkg: sym.Pkg, Name: sym.Name[:i-len(dot)]}
}
}
-
- sconv2(b, t.Sym(), 'v', mode)
+ sconv2(b, sym, verb, mode)
+
+ // TODO(mdempsky): Investigate including Vargen in fmtTypeIDName
+ // output too. It seems like it should, but that mode is currently
+ // used in the string representation used by reflection, which is
+ // user-visible and doesn't expect this.
+ if mode == fmtTypeID && t.Vargen != 0 {
+ fmt.Fprintf(b, "·%d", t.Vargen)
+ }
return
}
@@ -567,6 +590,18 @@ func tconv2(b *bytes.Buffer, t *Type, verb rune, mode fmtMode, visited map[*Type
b.WriteString(fmt.Sprintf("%p", t))
}
+ case TUNION:
+ for i := 0; i < t.NumTerms(); i++ {
+ if i > 0 {
+ b.WriteString("|")
+ }
+ elem, tilde := t.Term(i)
+ if tilde {
+ b.WriteString("~")
+ }
+ tconv2(b, elem, 0, mode, visited)
+ }
+
case Txxx:
b.WriteString("Txxx")
@@ -671,7 +706,7 @@ func FmtConst(v constant.Value, sharp bool) string {
// TypeHash computes a hash value for type t to use in type switch statements.
func TypeHash(t *Type) uint32 {
- p := t.LongString()
+ p := t.NameString()
// Using MD5 is overkill, but reduces accidental collisions.
h := md5.Sum([]byte(p))
diff --git a/src/cmd/compile/internal/types/identity.go b/src/cmd/compile/internal/types/identity.go
index dde9f51856..2e9e2f4fd8 100644
--- a/src/cmd/compile/internal/types/identity.go
+++ b/src/cmd/compile/internal/types/identity.go
@@ -4,8 +4,11 @@
package types
-// Identical reports whether t1 and t2 are identical types, following
-// the spec rules. Receiver parameter types are ignored.
+// Identical reports whether t1 and t2 are identical types, following the spec rules.
+// Receiver parameter types are ignored. Named (defined) types are only equal if they
+// are pointer-equal - i.e. there must be a unique types.Type for each specific named
+// type. Also, a type containing a shape type is considered identical to another type
+// (shape or not) if their underlying types are the same, or they are both pointers.
func Identical(t1, t2 *Type) bool {
return identical(t1, t2, true, nil)
}
@@ -29,6 +32,14 @@ func identical(t1, t2 *Type, cmpTags bool, assumedEqual map[typePair]struct{}) b
return false
}
if t1.sym != nil || t2.sym != nil {
+ if t1.HasShape() || t2.HasShape() {
+ switch t1.kind {
+ case TINT8, TUINT8, TINT16, TUINT16, TINT32, TUINT32, TINT64, TUINT64, TINT, TUINT, TUINTPTR, TCOMPLEX64, TCOMPLEX128, TFLOAT32, TFLOAT64, TBOOL, TSTRING, TPTR, TUNSAFEPTR:
+ return true
+ }
+ // fall through to unnamed type comparison for complex types.
+ goto cont
+ }
// Special case: we keep byte/uint8 and rune/int32
// separate for error messages. Treat them as equal.
switch t1.kind {
@@ -40,6 +51,7 @@ func identical(t1, t2 *Type, cmpTags bool, assumedEqual map[typePair]struct{}) b
return false
}
}
+cont:
// Any cyclic type must go through a named type, and if one is
// named, it is only identical to the other if they are the
diff --git a/src/cmd/compile/internal/types/kind_string.go b/src/cmd/compile/internal/types/kind_string.go
index ae24a58b92..3e6a8bc064 100644
--- a/src/cmd/compile/internal/types/kind_string.go
+++ b/src/cmd/compile/internal/types/kind_string.go
@@ -38,20 +38,21 @@ func _() {
_ = x[TSTRING-27]
_ = x[TUNSAFEPTR-28]
_ = x[TTYPEPARAM-29]
- _ = x[TIDEAL-30]
- _ = x[TNIL-31]
- _ = x[TBLANK-32]
- _ = x[TFUNCARGS-33]
- _ = x[TCHANARGS-34]
- _ = x[TSSA-35]
- _ = x[TTUPLE-36]
- _ = x[TRESULTS-37]
- _ = x[NTYPE-38]
+ _ = x[TUNION-30]
+ _ = x[TIDEAL-31]
+ _ = x[TNIL-32]
+ _ = x[TBLANK-33]
+ _ = x[TFUNCARGS-34]
+ _ = x[TCHANARGS-35]
+ _ = x[TSSA-36]
+ _ = x[TTUPLE-37]
+ _ = x[TRESULTS-38]
+ _ = x[NTYPE-39]
}
-const _Kind_name = "xxxINT8UINT8INT16UINT16INT32UINT32INT64UINT64INTUINTUINTPTRCOMPLEX64COMPLEX128FLOAT32FLOAT64BOOLPTRFUNCSLICEARRAYSTRUCTCHANMAPINTERFORWANYSTRINGUNSAFEPTRTYPEPARAMIDEALNILBLANKFUNCARGSCHANARGSSSATUPLERESULTSNTYPE"
+const _Kind_name = "xxxINT8UINT8INT16UINT16INT32UINT32INT64UINT64INTUINTUINTPTRCOMPLEX64COMPLEX128FLOAT32FLOAT64BOOLPTRFUNCSLICEARRAYSTRUCTCHANMAPINTERFORWANYSTRINGUNSAFEPTRTYPEPARAMUNIONIDEALNILBLANKFUNCARGSCHANARGSSSATUPLERESULTSNTYPE"
-var _Kind_index = [...]uint8{0, 3, 7, 12, 17, 23, 28, 34, 39, 45, 48, 52, 59, 68, 78, 85, 92, 96, 99, 103, 108, 113, 119, 123, 126, 131, 135, 138, 144, 153, 162, 167, 170, 175, 183, 191, 194, 199, 206, 211}
+var _Kind_index = [...]uint8{0, 3, 7, 12, 17, 23, 28, 34, 39, 45, 48, 52, 59, 68, 78, 85, 92, 96, 99, 103, 108, 113, 119, 123, 126, 131, 135, 138, 144, 153, 162, 167, 172, 175, 180, 188, 196, 199, 204, 211, 216}
func (i Kind) String() string {
if i >= Kind(len(_Kind_index)-1) {
diff --git a/src/cmd/compile/internal/types/pkg.go b/src/cmd/compile/internal/types/pkg.go
index a6d2e2007b..f63a357f0d 100644
--- a/src/cmd/compile/internal/types/pkg.go
+++ b/src/cmd/compile/internal/types/pkg.go
@@ -137,7 +137,3 @@ func CleanroomDo(f func()) {
f()
pkgMap = saved
}
-
-func IsDotAlias(sym *Sym) bool {
- return sym.Def != nil && sym.Def.Sym() != sym
-}
diff --git a/src/cmd/compile/internal/types/size.go b/src/cmd/compile/internal/types/size.go
index f0e695ab96..89391ade68 100644
--- a/src/cmd/compile/internal/types/size.go
+++ b/src/cmd/compile/internal/types/size.go
@@ -90,6 +90,26 @@ func expandiface(t *Type) {
methods = append(methods, m)
}
+ {
+ methods := t.Methods().Slice()
+ sort.SliceStable(methods, func(i, j int) bool {
+ mi, mj := methods[i], methods[j]
+
+ // Sort embedded types by type name (if any).
+ if mi.Sym == nil && mj.Sym == nil {
+ return mi.Type.Sym().Less(mj.Type.Sym())
+ }
+
+ // Sort methods before embedded types.
+ if mi.Sym == nil || mj.Sym == nil {
+ return mi.Sym != nil
+ }
+
+ // Sort methods by symbol name.
+ return mi.Sym.Less(mj.Sym)
+ })
+ }
+
for _, m := range t.Methods().Slice() {
if m.Sym == nil {
continue
@@ -104,8 +124,17 @@ func expandiface(t *Type) {
continue
}
+ if m.Type.IsUnion() {
+ continue
+ }
+
+ // In Go 1.18, embedded types can be anything. In Go 1.17, we disallow
+ // embedding anything other than interfaces.
if !m.Type.IsInterface() {
- base.ErrorfAt(m.Pos, "interface contains embedded non-interface %v", m.Type)
+ if AllowsGoVersion(t.Pkg(), 1, 18) {
+ continue
+ }
+ base.ErrorfAt(m.Pos, "interface contains embedded non-interface, non-union %v", m.Type)
m.SetBroke(true)
t.SetBroke(true)
// Add to fields so that error messages
@@ -120,10 +149,15 @@ func expandiface(t *Type) {
// (including broken ones, if any) and add to t's
// method set.
for _, t1 := range m.Type.AllMethods().Slice() {
- // Use m.Pos rather than t1.Pos to preserve embedding position.
f := NewField(m.Pos, t1.Sym, t1.Type)
addMethod(f, false)
+
+ // Clear position after typechecking, for consistency with types2.
+ f.Pos = src.NoXPos
}
+
+ // Clear position after typechecking, for consistency with types2.
+ m.Pos = src.NoXPos
}
sort.Sort(MethodsByName(methods))
@@ -405,6 +439,12 @@ func CalcSize(t *Type) {
t.Align = uint8(PtrSize)
expandiface(t)
+ case TUNION:
+ // Always part of an interface for now, so size/align don't matter.
+ // Pretend a union is represented like an interface.
+ w = 2 * int64(PtrSize)
+ t.Align = uint8(PtrSize)
+
case TCHAN: // implemented as pointer
w = int64(PtrSize)
diff --git a/src/cmd/compile/internal/types/sizeof_test.go b/src/cmd/compile/internal/types/sizeof_test.go
index 7028938742..7349e52a73 100644
--- a/src/cmd/compile/internal/types/sizeof_test.go
+++ b/src/cmd/compile/internal/types/sizeof_test.go
@@ -21,7 +21,7 @@ func TestSizeof(t *testing.T) {
_64bit uintptr // size on 64bit platforms
}{
{Sym{}, 44, 72},
- {Type{}, 60, 104},
+ {Type{}, 64, 112},
{Map{}, 20, 40},
{Forward{}, 20, 32},
{Func{}, 28, 48},
diff --git a/src/cmd/compile/internal/types/sort.go b/src/cmd/compile/internal/types/sort.go
index dc59b06415..765c070cd9 100644
--- a/src/cmd/compile/internal/types/sort.go
+++ b/src/cmd/compile/internal/types/sort.go
@@ -4,11 +4,16 @@
package types
-// MethodsByName sorts methods by symbol.
+// MethodsByName sorts methods by name.
type MethodsByName []*Field
-func (x MethodsByName) Len() int { return len(x) }
+func (x MethodsByName) Len() int { return len(x) }
+func (x MethodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+func (x MethodsByName) Less(i, j int) bool { return x[i].Sym.Less(x[j].Sym) }
-func (x MethodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+// EmbeddedsByName sorts embedded types by name.
+type EmbeddedsByName []*Field
-func (x MethodsByName) Less(i, j int) bool { return x[i].Sym.Less(x[j].Sym) }
+func (x EmbeddedsByName) Len() int { return len(x) }
+func (x EmbeddedsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+func (x EmbeddedsByName) Less(i, j int) bool { return x[i].Type.Sym().Less(x[j].Type.Sym()) }
diff --git a/src/cmd/compile/internal/types/sym.go b/src/cmd/compile/internal/types/sym.go
index 534cf7e237..fb642f52f8 100644
--- a/src/cmd/compile/internal/types/sym.go
+++ b/src/cmd/compile/internal/types/sym.go
@@ -110,6 +110,14 @@ func (a *Sym) Less(b *Sym) bool {
return false
}
+ // Nil before non-nil.
+ if a == nil {
+ return true
+ }
+ if b == nil {
+ return false
+ }
+
// Exported symbols before non-exported.
ea := IsExported(a.Name)
eb := IsExported(b.Name)
diff --git a/src/cmd/compile/internal/types/type.go b/src/cmd/compile/internal/types/type.go
index 1a9aa6916a..875b0ba82f 100644
--- a/src/cmd/compile/internal/types/type.go
+++ b/src/cmd/compile/internal/types/type.go
@@ -8,6 +8,7 @@ import (
"cmd/compile/internal/base"
"cmd/internal/src"
"fmt"
+ "strings"
"sync"
)
@@ -73,6 +74,7 @@ const (
TSTRING
TUNSAFEPTR
TTYPEPARAM
+ TUNION
// pseudo-types for literals
TIDEAL // untyped numeric constants
@@ -121,6 +123,8 @@ var (
// Predeclared error interface type.
ErrorType *Type
+ // Predeclared comparable interface type.
+ ComparableType *Type
// Types to represent untyped string and boolean constants.
UntypedString = New(TSTRING)
@@ -151,7 +155,7 @@ type Type struct {
// TARRAY: *Array
// TSLICE: Slice
// TSSA: string
- // TTYPEPARAM: *Interface (though we may not need to store/use the Interface info)
+ // TTYPEPARAM: *Typeparam
Extra interface{}
// Width is the width of this Type in bytes.
@@ -182,12 +186,19 @@ type Type struct {
flags bitset8
// For defined (named) generic types, a pointer to the list of type params
- // (in order) of this type that need to be instantiated. For
- // fully-instantiated generic types, this is the targs used to instantiate
- // them (which are used when generating the corresponding instantiated
- // methods). rparams is only set for named types that are generic or are
- // fully-instantiated from a generic type, and is otherwise set to nil.
+ // (in order) of this type that need to be instantiated. For instantiated
+ // generic types, this is the targs used to instantiate them. These targs
+ // may be typeparams (for re-instantiated types such as Value[T2]) or
+ // concrete types (for fully instantiated types such as Value[int]).
+ // rparams is only set for named types that are generic or are fully
+ // instantiated from a generic type, and is otherwise set to nil.
+ // TODO(danscales): choose a better name.
rparams *[]*Type
+
+ // For an instantiated generic type, the symbol for the base generic type.
+ // This backpointer is useful, because the base type is the type that has
+ // the method bodies.
+ OrigSym *Sym
}
func (*Type) CanBeAnSSAAux() {}
@@ -199,6 +210,8 @@ const (
typeDeferwidth // width computation has been deferred and type is on deferredTypeStack
typeRecur
typeHasTParam // there is a typeparam somewhere in the type (generic function or type)
+ typeIsShape // represents a set of closely related types, for generics
+ typeHasShape // there is a shape somewhere in the type
)
func (t *Type) NotInHeap() bool { return t.flags&typeNotInHeap != 0 }
@@ -207,13 +220,21 @@ func (t *Type) Noalg() bool { return t.flags&typeNoalg != 0 }
func (t *Type) Deferwidth() bool { return t.flags&typeDeferwidth != 0 }
func (t *Type) Recur() bool { return t.flags&typeRecur != 0 }
func (t *Type) HasTParam() bool { return t.flags&typeHasTParam != 0 }
+func (t *Type) IsShape() bool { return t.flags&typeIsShape != 0 }
+func (t *Type) HasShape() bool { return t.flags&typeHasShape != 0 }
func (t *Type) SetNotInHeap(b bool) { t.flags.set(typeNotInHeap, b) }
func (t *Type) SetBroke(b bool) { t.flags.set(typeBroke, b) }
func (t *Type) SetNoalg(b bool) { t.flags.set(typeNoalg, b) }
func (t *Type) SetDeferwidth(b bool) { t.flags.set(typeDeferwidth, b) }
func (t *Type) SetRecur(b bool) { t.flags.set(typeRecur, b) }
-func (t *Type) SetHasTParam(b bool) { t.flags.set(typeHasTParam, b) }
+
+// Generic types should never have alg functions.
+func (t *Type) SetHasTParam(b bool) { t.flags.set(typeHasTParam, b); t.flags.set(typeNoalg, b) }
+
+// Callers should always do SetHasShape(true) when doing SetIsShape(true).
+func (t *Type) SetIsShape(b bool) { t.flags.set(typeIsShape, b) }
+func (t *Type) SetHasShape(b bool) { t.flags.set(typeHasShape, b) }
// Kind returns the kind of type t.
func (t *Type) Kind() Kind { return t.kind }
@@ -255,9 +276,6 @@ func (t *Type) SetRParams(rparams []*Type) {
base.Fatalf("Setting nil or zero-length rparams")
}
t.rparams = &rparams
- if t.HasTParam() {
- return
- }
// HasTParam should be set if any rparam is or has a type param. This is
// to handle the case of a generic type which doesn't reference any of its
// type params (e.g. most commonly, an empty struct).
@@ -266,9 +284,33 @@ func (t *Type) SetRParams(rparams []*Type) {
t.SetHasTParam(true)
break
}
+ if rparam.HasShape() {
+ t.SetHasShape(true)
+ break
+ }
}
}
+// IsBaseGeneric reports whether t is a base generic type, i.e. a generic type
+// that has been neither reinstantiated with other type params nor fully instantiated.
+func (t *Type) IsBaseGeneric() bool {
+ return len(t.RParams()) > 0 && strings.Index(t.Sym().Name, "[") < 0
+}
+
+// IsInstantiatedGeneric reports whether t is a generic type that has been
+// reinstantiated with new typeparams (i.e. is not fully instantiated).
+func (t *Type) IsInstantiatedGeneric() bool {
+ return len(t.RParams()) > 0 && strings.Index(t.Sym().Name, "[") >= 0 &&
+ t.HasTParam()
+}
+
+// IsFullyInstantiated reports whether t is a fully instantiated generic type; i.e. an
+// instantiated generic type where all type arguments are non-generic or fully
+// instantiated generic types.
+func (t *Type) IsFullyInstantiated() bool {
+ return len(t.RParams()) > 0 && !t.HasTParam()
+}
+
// NoPkg is a nil *Pkg value for clarity.
// It's intended for use when constructing types that aren't exported
// and thus don't need to be associated with any package.
@@ -377,6 +419,18 @@ type Interface struct {
pkg *Pkg
}
+// Typeparam contains Type fields specific to typeparam types.
+type Typeparam struct {
+ index int // type parameter index in source order, starting at 0
+ bound *Type
+}
+
+// Union contains Type fields specific to union types.
+type Union struct {
+ terms []*Type
+ tildes []bool // whether terms[i] is of form ~T
+}
+
// Ptr contains Type fields specific to pointer types.
type Ptr struct {
Elem *Type // element type
@@ -558,7 +612,9 @@ func New(et Kind) *Type {
case TRESULTS:
t.Extra = new(Results)
case TTYPEPARAM:
- t.Extra = new(Interface)
+ t.Extra = new(Typeparam)
+ case TUNION:
+ t.Extra = new(Union)
}
return t
}
@@ -574,6 +630,9 @@ func NewArray(elem *Type, bound int64) *Type {
if elem.HasTParam() {
t.SetHasTParam(true)
}
+ if elem.HasShape() {
+ t.SetHasShape(true)
+ }
return t
}
@@ -592,6 +651,9 @@ func NewSlice(elem *Type) *Type {
if elem.HasTParam() {
t.SetHasTParam(true)
}
+ if elem.HasShape() {
+ t.SetHasShape(true)
+ }
return t
}
@@ -604,6 +666,9 @@ func NewChan(elem *Type, dir ChanDir) *Type {
if elem.HasTParam() {
t.SetHasTParam(true)
}
+ if elem.HasShape() {
+ t.SetHasShape(true)
+ }
return t
}
@@ -614,6 +679,9 @@ func NewTuple(t1, t2 *Type) *Type {
if t1.HasTParam() || t2.HasTParam() {
t.SetHasTParam(true)
}
+ if t1.HasShape() || t2.HasShape() {
+ t.SetHasShape(true)
+ }
return t
}
@@ -645,6 +713,9 @@ func NewMap(k, v *Type) *Type {
if k.HasTParam() || v.HasTParam() {
t.SetHasTParam(true)
}
+ if k.HasShape() || v.HasShape() {
+ t.SetHasShape(true)
+ }
return t
}
@@ -669,6 +740,9 @@ func NewPtr(elem *Type) *Type {
// when this entry was cached.
t.SetHasTParam(true)
}
+ if elem.HasShape() {
+ t.SetHasShape(true)
+ }
return t
}
@@ -682,6 +756,9 @@ func NewPtr(elem *Type) *Type {
if elem.HasTParam() {
t.SetHasTParam(true)
}
+ if elem.HasShape() {
+ t.SetHasShape(true)
+ }
return t
}
@@ -825,6 +902,8 @@ func (t *Type) copy() *Type {
case TARRAY:
x := *t.Extra.(*Array)
nt.Extra = &x
+ case TTYPEPARAM:
+ base.Fatalf("typeparam types cannot be copied")
case TTUPLE, TSSA, TRESULTS:
base.Fatalf("ssa types cannot be copied")
}
@@ -925,7 +1004,7 @@ func (t *Type) FuncArgs() *Type {
return t.Extra.(FuncArgs).T
}
-// IsFuncArgStruct reports whether t is a struct representing function parameters.
+// IsFuncArgStruct reports whether t is a struct representing function parameters or results.
func (t *Type) IsFuncArgStruct() bool {
return t.kind == TSTRUCT && t.Extra.(*Struct).Funarg != FunargNone
}
@@ -1436,6 +1515,14 @@ func (t *Type) IsInterface() bool {
return t.kind == TINTER
}
+func (t *Type) IsUnion() bool {
+ return t.kind == TUNION
+}
+
+func (t *Type) IsTypeParam() bool {
+ return t.kind == TTYPEPARAM
+}
+
// IsEmptyInterface reports whether t is an empty interface type.
func (t *Type) IsEmptyInterface() bool {
return t.IsInterface() && t.AllMethods().Len() == 0
@@ -1708,6 +1795,9 @@ func (t *Type) SetUnderlying(underlying *Type) {
if underlying.HasTParam() {
t.SetHasTParam(true)
}
+ if underlying.HasShape() {
+ t.SetHasShape(true)
+ }
// spec: "The declared type does not inherit any methods bound
// to the existing type, but the method set of an interface
@@ -1739,6 +1829,15 @@ func fieldsHasTParam(fields []*Field) bool {
return false
}
+func fieldsHasShape(fields []*Field) bool {
+ for _, f := range fields {
+ if f.Type != nil && f.Type.HasShape() {
+ return true
+ }
+ }
+ return false
+}
+
// NewBasic returns a new basic type of the given kind.
func NewBasic(kind Kind, obj Object) *Type {
t := New(kind)
@@ -1758,6 +1857,10 @@ func NewInterface(pkg *Pkg, methods []*Field) *Type {
t.SetHasTParam(true)
break
}
+ if f.Type != nil && f.Type.HasShape() {
+ t.SetHasShape(true)
+ break
+ }
}
if anyBroke(methods) {
t.SetBroke(true)
@@ -1766,14 +1869,75 @@ func NewInterface(pkg *Pkg, methods []*Field) *Type {
return t
}
-// NewTypeParam returns a new type param.
-func NewTypeParam(pkg *Pkg) *Type {
+// NewTypeParam returns a new type param with the specified sym (package and name)
+// and specified index within the typeparam list.
+func NewTypeParam(sym *Sym, index int) *Type {
t := New(TTYPEPARAM)
- t.Extra.(*Interface).pkg = pkg
+ t.sym = sym
+ t.Extra.(*Typeparam).index = index
t.SetHasTParam(true)
return t
}
+// Index returns the index of the type param within its param list.
+func (t *Type) Index() int {
+ t.wantEtype(TTYPEPARAM)
+ return t.Extra.(*Typeparam).index
+}
+
+// SetIndex sets the index of the type param within its param list.
+func (t *Type) SetIndex(i int) {
+ t.wantEtype(TTYPEPARAM)
+ t.Extra.(*Typeparam).index = i
+}
+
+// SetBound sets the bound of a typeparam.
+func (t *Type) SetBound(bound *Type) {
+ t.wantEtype(TTYPEPARAM)
+ t.Extra.(*Typeparam).bound = bound
+}
+
+// Bound returns the bound of a typeparam.
+func (t *Type) Bound() *Type {
+ t.wantEtype(TTYPEPARAM)
+ return t.Extra.(*Typeparam).bound
+}
+
+// NewUnion returns a new union with the specified set of terms (types). If
+// tildes[i] is true, then terms[i] represents ~T, rather than just T.
+func NewUnion(terms []*Type, tildes []bool) *Type {
+ t := New(TUNION)
+ if len(terms) != len(tildes) {
+ base.Fatalf("Mismatched terms and tildes for NewUnion")
+ }
+ t.Extra.(*Union).terms = terms
+ t.Extra.(*Union).tildes = tildes
+ nt := len(terms)
+ for i := 0; i < nt; i++ {
+ if terms[i].HasTParam() {
+ t.SetHasTParam(true)
+ }
+ if terms[i].HasShape() {
+ t.SetHasShape(true)
+ }
+ }
+ return t
+}
+
+// NumTerms returns the number of terms in a union type.
+func (t *Type) NumTerms() int {
+ t.wantEtype(TUNION)
+ return len(t.Extra.(*Union).terms)
+}
+
+// Term returns the ith term of a union type as (term, tilde). If tilde is true, term
+// represents ~T, rather than just T.
+func (t *Type) Term(i int) (*Type, bool) {
+ t.wantEtype(TUNION)
+ u := t.Extra.(*Union)
+ return u.terms[i], u.tildes[i]
+}
+
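As an illustration (not part of this patch), a standalone sketch of the parallel terms/tildes representation and how a union like ~int | string is rendered, matching the TUNION formatting case added in fmt.go:

package main

import (
	"fmt"
	"strings"
)

// union mirrors the parallel-slice representation used above:
// tildes[i] records whether terms[i] is written ~T rather than T.
type union struct {
	terms  []string
	tildes []bool
}

func (u union) String() string {
	var b strings.Builder
	for i, t := range u.terms {
		if i > 0 {
			b.WriteString("|")
		}
		if u.tildes[i] {
			b.WriteString("~")
		}
		b.WriteString(t)
	}
	return b.String()
}

func main() {
	u := union{terms: []string{"int", "string"}, tildes: []bool{true, false}}
	fmt.Println(u) // ~int|string
}
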
const BOGUS_FUNARG_OFFSET = -1000000000
func unzeroFieldOffsets(f []*Field) {
@@ -1817,6 +1981,9 @@ func NewSignature(pkg *Pkg, recv *Field, tparams, params, results []*Field) *Typ
fieldsHasTParam(results) {
t.SetHasTParam(true)
}
+ if fieldsHasShape(recvs) || fieldsHasShape(params) || fieldsHasShape(results) {
+ t.SetHasShape(true)
+ }
return t
}
@@ -1832,6 +1999,9 @@ func NewStruct(pkg *Pkg, fields []*Field) *Type {
if fieldsHasTParam(fields) {
t.SetHasTParam(true)
}
+ if fieldsHasShape(fields) {
+ t.SetHasShape(true)
+ }
return t
}
@@ -2028,7 +2198,7 @@ func TypeSymLookup(name string) *Sym {
}
func TypeSymName(t *Type) string {
- name := t.ShortString()
+ name := t.LinkString()
// Use a separate symbol name for Noalg types for #17752.
if TypeHasNoAlg(t) {
name = "noalg." + name
diff --git a/src/cmd/compile/internal/types2/api.go b/src/cmd/compile/internal/types2/api.go
index 2939dcc0bd..b2938b84da 100644
--- a/src/cmd/compile/internal/types2/api.go
+++ b/src/cmd/compile/internal/types2/api.go
@@ -55,6 +55,18 @@ func (err Error) FullError() string {
return fmt.Sprintf("%s: %s", err.Pos, err.Full)
}
+// An ArgumentError holds an error that is associated with an argument.
+type ArgumentError struct {
+ index int
+ error
+}
+
+// Index returns the positional index of the argument associated with the
+// error.
+func (e ArgumentError) Index() int {
+ return e.index
+}
+
// An Importer resolves import paths to Packages.
//
// CAUTION: This interface does not support the import of locally
@@ -125,6 +137,12 @@ type Config struct {
// TODO(gri) Consolidate error messages and remove this flag.
CompilerErrorMessages bool
+ // If AllowTypeLists is set, the type list syntax is permitted
+ // in an interface in addition to the type set syntax.
+ // TODO(gri) Remove once type lists are no longer supported by
+ // the parser.
+ AllowTypeLists bool
+
// If go115UsesCgo is set, the type checker expects the
// _cgo_gotypes.go file generated by running cmd/cgo to be
// provided as a package source file. Qualified identifiers
@@ -355,7 +373,7 @@ func (tv TypeAndValue) HasOk() bool {
// Inferred reports the inferred type arguments and signature
// for a parameterized function call that uses type inference.
type Inferred struct {
- Targs []Type
+ TArgs *TypeList
Sig *Signature
}
@@ -424,11 +442,11 @@ func Implements(V Type, T *Interface) bool {
// Identical reports whether x and y are identical types.
// Receivers of Signature types are ignored.
func Identical(x, y Type) bool {
- return (*Checker)(nil).identical(x, y)
+ return identical(x, y, true, nil)
}
// IdenticalIgnoreTags reports whether x and y are identical types if tags are ignored.
// Receivers of Signature types are ignored.
func IdenticalIgnoreTags(x, y Type) bool {
- return (*Checker)(nil).identicalIgnoreTags(x, y)
+ return identical(x, y, false, nil)
}
diff --git a/src/cmd/compile/internal/types2/api_test.go b/src/cmd/compile/internal/types2/api_test.go
index 873390c1e9..039a6c0e5e 100644
--- a/src/cmd/compile/internal/types2/api_test.go
+++ b/src/cmd/compile/internal/types2/api_test.go
@@ -17,10 +17,6 @@ import (
. "cmd/compile/internal/types2"
)
-func unimplemented() {
- panic("unimplemented")
-}
-
// genericPkg is a source prefix for packages that contain generic code.
const genericPkg = "package generic_"
@@ -329,27 +325,29 @@ func TestTypesInfo(t *testing.T) {
{brokenPkg + `x5; func _() { var x map[string][...]int; x = map[string][...]int{"": {1,2,3}} }`, `x`, `map[string]invalid type`},
// parameterized functions
- {genericPkg + `p0; func f[T any](T); var _ = f[int]`, `f`, `func[T₁ interface{}](T₁)`},
- {genericPkg + `p1; func f[T any](T); var _ = f[int]`, `f[int]`, `func(int)`},
- {genericPkg + `p2; func f[T any](T); func _() { f(42) }`, `f`, `func[T₁ interface{}](T₁)`},
- {genericPkg + `p3; func f[T any](T); func _() { f(42) }`, `f(42)`, `()`},
+ {genericPkg + `p0; func f[T any](T) {}; var _ = f[int]`, `f`, `func[generic_p0.T₁ interface{}](generic_p0.T₁)`},
+ {genericPkg + `p1; func f[T any](T) {}; var _ = f[int]`, `f[int]`, `func(int)`},
+ {genericPkg + `p2; func f[T any](T) {}; func _() { f(42) }`, `f`, `func[generic_p2.T₁ interface{}](generic_p2.T₁)`},
+ {genericPkg + `p3; func f[T any](T) {}; func _() { f(42) }`, `f(42)`, `()`},
// type parameters
{genericPkg + `t0; type t[] int; var _ t`, `t`, `generic_t0.t`}, // t[] is a syntax error that is ignored in this test in favor of t
- {genericPkg + `t1; type t[P any] int; var _ t[int]`, `t`, `generic_t1.t[P₁ interface{}]`},
- {genericPkg + `t2; type t[P interface{}] int; var _ t[int]`, `t`, `generic_t2.t[P₁ interface{}]`},
- {genericPkg + `t3; type t[P, Q interface{}] int; var _ t[int, int]`, `t`, `generic_t3.t[P₁, Q₂ interface{}]`},
- {brokenPkg + `t4; type t[P, Q interface{ m() }] int; var _ t[int, int]`, `t`, `broken_t4.t[P₁, Q₂ interface{m()}]`},
+ {genericPkg + `t1; type t[P any] int; var _ t[int]`, `t`, `generic_t1.t[generic_t1.P₁ interface{}]`},
+ {genericPkg + `t2; type t[P interface{}] int; var _ t[int]`, `t`, `generic_t2.t[generic_t2.P₁ interface{}]`},
+ {genericPkg + `t3; type t[P, Q interface{}] int; var _ t[int, int]`, `t`, `generic_t3.t[generic_t3.P₁, generic_t3.Q₂ interface{}]`},
+ {brokenPkg + `t4; type t[P, Q interface{ m() }] int; var _ t[int, int]`, `t`, `broken_t4.t[broken_t4.P₁, broken_t4.Q₂ interface{m()}]`},
// instantiated types must be sanitized
{genericPkg + `g0; type t[P any] int; var x struct{ f t[int] }; var _ = x.f`, `x.f`, `generic_g0.t[int]`},
// issue 45096
- {genericPkg + `issue45096; func _[T interface{ type int8, int16, int32 }](x T) { _ = x < 0 }`, `0`, `T₁`},
+ {genericPkg + `issue45096; func _[T interface{ ~int8 | ~int16 | ~int32 }](x T) { _ = x < 0 }`, `0`, `generic_issue45096.T₁`},
+
+ // issue 47895
+ {`package p; import "unsafe"; type S struct { f int }; var s S; var _ = unsafe.Offsetof(s.f)`, `s.f`, `int`},
}
for _, test := range tests {
- ResetId() // avoid renumbering of type parameter ids when adding tests
info := Info{Types: make(map[syntax.Expr]TypeAndValue)}
var name string
if strings.HasPrefix(test.src, brokenPkg) {
@@ -390,60 +388,60 @@ func TestInferredInfo(t *testing.T) {
targs []string
sig string
}{
- {genericPkg + `p0; func f[T any](T); func _() { f(42) }`,
+ {genericPkg + `p0; func f[T any](T) {}; func _() { f(42) }`,
`f`,
[]string{`int`},
`func(int)`,
},
- {genericPkg + `p1; func f[T any](T) T; func _() { f('@') }`,
+ {genericPkg + `p1; func f[T any](T) T { panic(0) }; func _() { f('@') }`,
`f`,
[]string{`rune`},
`func(rune) rune`,
},
- {genericPkg + `p2; func f[T any](...T) T; func _() { f(0i) }`,
+ {genericPkg + `p2; func f[T any](...T) T { panic(0) }; func _() { f(0i) }`,
`f`,
[]string{`complex128`},
`func(...complex128) complex128`,
},
- {genericPkg + `p3; func f[A, B, C any](A, *B, []C); func _() { f(1.2, new(string), []byte{}) }`,
+ {genericPkg + `p3; func f[A, B, C any](A, *B, []C) {}; func _() { f(1.2, new(string), []byte{}) }`,
`f`,
[]string{`float64`, `string`, `byte`},
`func(float64, *string, []byte)`,
},
- {genericPkg + `p4; func f[A, B any](A, *B, ...[]B); func _() { f(1.2, new(byte)) }`,
+ {genericPkg + `p4; func f[A, B any](A, *B, ...[]B) {}; func _() { f(1.2, new(byte)) }`,
`f`,
[]string{`float64`, `byte`},
`func(float64, *byte, ...[]byte)`,
},
// we don't know how to translate these but we can type-check them
- {genericPkg + `q0; type T struct{}; func (T) m[P any](P); func _(x T) { x.m(42) }`,
+ {genericPkg + `q0; type T struct{}; func (T) m[P any](P) {}; func _(x T) { x.m(42) }`,
`x.m`,
[]string{`int`},
`func(int)`,
},
- {genericPkg + `q1; type T struct{}; func (T) m[P any](P) P; func _(x T) { x.m(42) }`,
+ {genericPkg + `q1; type T struct{}; func (T) m[P any](P) P { panic(0) }; func _(x T) { x.m(42) }`,
`x.m`,
[]string{`int`},
`func(int) int`,
},
- {genericPkg + `q2; type T struct{}; func (T) m[P any](...P) P; func _(x T) { x.m(42) }`,
+ {genericPkg + `q2; type T struct{}; func (T) m[P any](...P) P { panic(0) }; func _(x T) { x.m(42) }`,
`x.m`,
[]string{`int`},
`func(...int) int`,
},
- {genericPkg + `q3; type T struct{}; func (T) m[A, B, C any](A, *B, []C); func _(x T) { x.m(1.2, new(string), []byte{}) }`,
+ {genericPkg + `q3; type T struct{}; func (T) m[A, B, C any](A, *B, []C) {}; func _(x T) { x.m(1.2, new(string), []byte{}) }`,
`x.m`,
[]string{`float64`, `string`, `byte`},
`func(float64, *string, []byte)`,
},
- {genericPkg + `q4; type T struct{}; func (T) m[A, B any](A, *B, ...[]B); func _(x T) { x.m(1.2, new(byte)) }`,
+ {genericPkg + `q4; type T struct{}; func (T) m[A, B any](A, *B, ...[]B) {}; func _(x T) { x.m(1.2, new(byte)) }`,
`x.m`,
[]string{`float64`, `byte`},
`func(float64, *byte, ...[]byte)`,
},
- {genericPkg + `r0; type T[P any] struct{}; func (_ T[P]) m[Q any](Q); func _[P any](x T[P]) { x.m(42) }`,
+ {genericPkg + `r0; type T[P any] struct{}; func (_ T[P]) m[Q any](Q) {}; func _[P any](x T[P]) { x.m(42) }`,
`x.m`,
[]string{`int`},
`func(int)`,
@@ -455,38 +453,38 @@ func TestInferredInfo(t *testing.T) {
// `func(float64)`,
// },
- {genericPkg + `s1; func f[T any, P interface{type *T}](x T); func _(x string) { f(x) }`,
+ {genericPkg + `s1; func f[T any, P interface{~*T}](x T) {}; func _(x string) { f(x) }`,
`f`,
[]string{`string`, `*string`},
`func(x string)`,
},
- {genericPkg + `s2; func f[T any, P interface{type *T}](x []T); func _(x []int) { f(x) }`,
+ {genericPkg + `s2; func f[T any, P interface{~*T}](x []T) {}; func _(x []int) { f(x) }`,
`f`,
[]string{`int`, `*int`},
`func(x []int)`,
},
- {genericPkg + `s3; type C[T any] interface{type chan<- T}; func f[T any, P C[T]](x []T); func _(x []int) { f(x) }`,
+ {genericPkg + `s3; type C[T any] interface{~chan<- T}; func f[T any, P C[T]](x []T) {}; func _(x []int) { f(x) }`,
`f`,
[]string{`int`, `chan<- int`},
`func(x []int)`,
},
- {genericPkg + `s4; type C[T any] interface{type chan<- T}; func f[T any, P C[T], Q C[[]*P]](x []T); func _(x []int) { f(x) }`,
+ {genericPkg + `s4; type C[T any] interface{~chan<- T}; func f[T any, P C[T], Q C[[]*P]](x []T) {}; func _(x []int) { f(x) }`,
`f`,
[]string{`int`, `chan<- int`, `chan<- []*chan<- int`},
`func(x []int)`,
},
- {genericPkg + `t1; func f[T any, P interface{type *T}]() T; func _() { _ = f[string] }`,
+ {genericPkg + `t1; func f[T any, P interface{~*T}]() T { panic(0) }; func _() { _ = f[string] }`,
`f`,
[]string{`string`, `*string`},
`func() string`,
},
- {genericPkg + `t2; type C[T any] interface{type chan<- T}; func f[T any, P C[T]]() []T; func _() { _ = f[int] }`,
+ {genericPkg + `t2; type C[T any] interface{~chan<- T}; func f[T any, P C[T]]() []T { return nil }; func _() { _ = f[int] }`,
`f`,
[]string{`int`, `chan<- int`},
`func() []int`,
},
- {genericPkg + `t3; type C[T any] interface{type chan<- T}; func f[T any, P C[T], Q C[[]*P]]() []T; func _() { _ = f[int] }`,
+ {genericPkg + `t3; type C[T any] interface{~chan<- T}; func f[T any, P C[T], Q C[[]*P]]() []T { return nil }; func _() { _ = f[int] }`,
`f`,
[]string{`int`, `chan<- int`, `chan<- []*chan<- int`},
`func() []int`,
@@ -502,7 +500,7 @@ func TestInferredInfo(t *testing.T) {
}
// look for inferred type arguments and signature
- var targs []Type
+ var targs *TypeList
var sig *Signature
for call, inf := range info.Inferred {
var fun syntax.Expr
@@ -515,7 +513,7 @@ func TestInferredInfo(t *testing.T) {
panic(fmt.Sprintf("unexpected call expression type %T", call))
}
if syntax.String(fun) == test.fun {
- targs = inf.Targs
+ targs = inf.TArgs
sig = inf.Sig
break
}
@@ -526,11 +524,12 @@ func TestInferredInfo(t *testing.T) {
}
// check that type arguments are correct
- if len(targs) != len(test.targs) {
- t.Errorf("package %s: got %d type arguments; want %d", name, len(targs), len(test.targs))
+ if targs.Len() != len(test.targs) {
+ t.Errorf("package %s: got %d type arguments; want %d", name, targs.Len(), len(test.targs))
continue
}
- for i, targ := range targs {
+ for i := 0; i < targs.Len(); i++ {
+ targ := targs.At(i)
if got := targ.String(); got != test.targs[i] {
t.Errorf("package %s, %d. type argument: got %s; want %s", name, i, got, test.targs[i])
continue
@@ -1166,8 +1165,6 @@ func (m testImporter) Import(path string) (*Package, error) {
}
func TestSelection(t *testing.T) {
- t.Skip("requires fixes around source positions")
-
selections := make(map[*syntax.SelectorExpr]*Selection)
imports := make(testImporter)
@@ -1291,11 +1288,9 @@ func main() {
for e, sel := range selections {
_ = sel.String() // assertion: must not panic
- unimplemented()
- _ = e
- // start := fset.Position(e.Pos()).Offset
- // end := fset.Position(e.End()).Offset
- // syntax := mainSrc[start:end] // (all SelectorExprs are in main, not lib)
+ start := indexFor(mainSrc, syntax.StartPos(e))
+ end := indexFor(mainSrc, syntax.EndPos(e))
+ segment := mainSrc[start:end] // (all SelectorExprs are in main, not lib)
direct := "."
if sel.Indirect() {
@@ -1305,13 +1300,11 @@ func main() {
sel.String(),
fmt.Sprintf("%s%v", direct, sel.Index()),
}
- unimplemented()
- _ = got
- // want := wantOut[syntax]
- // if want != got {
- // t.Errorf("%s: got %q; want %q", syntax, got, want)
- // }
- // delete(wantOut, syntax)
+ want := wantOut[segment]
+ if want != got {
+ t.Errorf("%s: got %q; want %q", segment, got, want)
+ }
+ delete(wantOut, segment)
// We must explicitly assert properties of the
// Signature's receiver since it doesn't participate
@@ -1321,19 +1314,31 @@ func main() {
got := sig.Recv().Type()
want := sel.Recv()
if !Identical(got, want) {
- unimplemented()
- // t.Errorf("%s: Recv() = %s, want %s", syntax, got, want)
+ t.Errorf("%s: Recv() = %s, want %s", segment, got, want)
}
} else if sig != nil && sig.Recv() != nil {
t.Errorf("%s: signature has receiver %s", sig, sig.Recv().Type())
}
}
// Assert that all wantOut entries were used exactly once.
- for syntax := range wantOut {
- t.Errorf("no syntax.Selection found with syntax %q", syntax)
+ for segment := range wantOut {
+ t.Errorf("no syntax.Selection found with syntax %q", segment)
}
}
+// indexFor returns the index into s corresponding to the position pos.
+func indexFor(s string, pos syntax.Pos) int {
+ i, line := 0, 1 // string index and corresponding line
+ target := int(pos.Line())
+ for line < target && i < len(s) {
+ if s[i] == '\n' {
+ line++
+ }
+ i++
+ }
+ return i + int(pos.Col()-1) // columns are 1-based
+}
+
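A standalone sanity check of the same line/column-to-offset conversion that indexFor performs (illustrative only, not part of this patch; the source string is made up):

package main

import "fmt"

// offsetFor converts a 1-based (line, col) position into a byte index,
// mirroring indexFor above: walk to the target line, then add the column.
func offsetFor(s string, line, col int) int {
	i, cur := 0, 1
	for cur < line && i < len(s) {
		if s[i] == '\n' {
			cur++
		}
		i++
	}
	return i + col - 1 // columns are 1-based
}

func main() {
	src := "ab\ncd\nef"
	fmt.Println(offsetFor(src, 2, 1)) // 3, the index of 'c'
}
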
func TestIssue8518(t *testing.T) {
imports := make(testImporter)
conf := Config{
@@ -1568,6 +1573,14 @@ func F(){
}
}
+var nopos syntax.Pos
+
+// newDefined creates a new defined type named T with the given underlying type.
+func newDefined(underlying Type) *Named {
+ tname := NewTypeName(nopos, nil, "T", nil)
+ return NewNamed(tname, underlying, nil)
+}
+
func TestConvertibleTo(t *testing.T) {
for _, test := range []struct {
v, t Type
@@ -1847,3 +1860,88 @@ func f(x T) T { return foo.F(x) }
}
}
}
+
+func TestInstantiate(t *testing.T) {
+ // eventually we'd like more tests, but this is a start
+ const src = genericPkg + "p; type T[P any] *T[P]"
+ pkg, err := pkgFor(".", src, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // type T should have one type parameter
+ T := pkg.Scope().Lookup("T").Type().(*Named)
+ if n := T.TParams().Len(); n != 1 {
+ t.Fatalf("expected 1 type parameter; found %d", n)
+ }
+
+ // instantiation should succeed (no endless recursion)
+ // even with a nil *Checker
+ res, err := Instantiate(nil, T, []Type{Typ[Int]}, false)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // instantiated type should point to itself
+ if p := res.Underlying().(*Pointer).Elem(); p != res {
+ t.Fatalf("unexpected result type: %s points to %s", res, p)
+ }
+}
+
+func TestInstantiateErrors(t *testing.T) {
+ tests := []struct {
+ src string // by convention, T must be the type being instantiated
+ targs []Type
+ wantAt int // -1 indicates no error
+ }{
+ {"type T[P interface{~string}] int", []Type{Typ[Int]}, 0},
+ {"type T[P1 interface{int}, P2 interface{~string}] int", []Type{Typ[Int], Typ[Int]}, 1},
+ {"type T[P1 any, P2 interface{~[]P1}] int", []Type{Typ[Int], NewSlice(Typ[String])}, 1},
+ {"type T[P1 interface{~[]P2}, P2 any] int", []Type{NewSlice(Typ[String]), Typ[Int]}, 0},
+ }
+
+ for _, test := range tests {
+ src := genericPkg + "p; " + test.src
+ pkg, err := pkgFor(".", src, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ T := pkg.Scope().Lookup("T").Type().(*Named)
+
+ _, err = Instantiate(nil, T, test.targs, true)
+ if err == nil {
+ t.Fatalf("Instantiate(%v, %v) returned nil error, want non-nil", T, test.targs)
+ }
+
+ gotAt := err.(ArgumentError).Index()
+ if gotAt != test.wantAt {
+ t.Errorf("Instantate(%v, %v): error at index %d, want index %d", T, test.targs, gotAt, test.wantAt)
+ }
+ }
+}
+
+func TestInstanceIdentity(t *testing.T) {
+ imports := make(testImporter)
+ conf := Config{Importer: imports}
+ makePkg := func(src string) {
+ f, err := parseSrc("", src)
+ if err != nil {
+ t.Fatal(err)
+ }
+ name := f.PkgName.Value
+ pkg, err := conf.Check(name, []*syntax.File{f}, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+ imports[name] = pkg
+ }
+ makePkg(genericPkg + `lib; type T[P any] struct{}`)
+ makePkg(genericPkg + `a; import "generic_lib"; var A generic_lib.T[int]`)
+ makePkg(genericPkg + `b; import "generic_lib"; var B generic_lib.T[int]`)
+ a := imports["generic_a"].Scope().Lookup("A")
+ b := imports["generic_b"].Scope().Lookup("B")
+ if !Identical(a.Type(), b.Type()) {
+ t.Errorf("mismatching types: a.A: %s, b.B: %s", a.Type(), b.Type())
+ }
+}
diff --git a/src/cmd/compile/internal/types2/array.go b/src/cmd/compile/internal/types2/array.go
new file mode 100644
index 0000000000..502d49bc25
--- /dev/null
+++ b/src/cmd/compile/internal/types2/array.go
@@ -0,0 +1,25 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+// An Array represents an array type.
+type Array struct {
+ len int64
+ elem Type
+}
+
+// NewArray returns a new array type for the given element type and length.
+// A negative length indicates an unknown length.
+func NewArray(elem Type, len int64) *Array { return &Array{len: len, elem: elem} }
+
+// Len returns the length of array a.
+// A negative result indicates an unknown length.
+func (a *Array) Len() int64 { return a.len }
+
+// Elem returns element type of array a.
+func (a *Array) Elem() Type { return a.elem }
+
+func (a *Array) Underlying() Type { return a }
+func (a *Array) String() string { return TypeString(a, nil) }
diff --git a/src/cmd/compile/internal/types2/assignments.go b/src/cmd/compile/internal/types2/assignments.go
index 583118c8b2..6184fc2ea5 100644
--- a/src/cmd/compile/internal/types2/assignments.go
+++ b/src/cmd/compile/internal/types2/assignments.go
@@ -68,7 +68,7 @@ func (check *Checker) assignment(x *operand, T Type, context string) {
// x.typ is typed
// A generic (non-instantiated) function value cannot be assigned to a variable.
- if sig := asSignature(x.typ); sig != nil && len(sig.tparams) > 0 {
+ if sig := asSignature(x.typ); sig != nil && sig.TParams().Len() > 0 {
check.errorf(x, "cannot use generic function %s without instantiation in %s", x, context)
}
diff --git a/src/cmd/compile/internal/types2/basic.go b/src/cmd/compile/internal/types2/basic.go
new file mode 100644
index 0000000000..2fd973cafb
--- /dev/null
+++ b/src/cmd/compile/internal/types2/basic.go
@@ -0,0 +1,82 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+// BasicKind describes the kind of basic type.
+type BasicKind int
+
+const (
+ Invalid BasicKind = iota // type is invalid
+
+ // predeclared types
+ Bool
+ Int
+ Int8
+ Int16
+ Int32
+ Int64
+ Uint
+ Uint8
+ Uint16
+ Uint32
+ Uint64
+ Uintptr
+ Float32
+ Float64
+ Complex64
+ Complex128
+ String
+ UnsafePointer
+
+ // types for untyped values
+ UntypedBool
+ UntypedInt
+ UntypedRune
+ UntypedFloat
+ UntypedComplex
+ UntypedString
+ UntypedNil
+
+ // aliases
+ Byte = Uint8
+ Rune = Int32
+)
+
+// BasicInfo is a set of flags describing properties of a basic type.
+type BasicInfo int
+
+// Properties of basic types.
+const (
+ IsBoolean BasicInfo = 1 << iota
+ IsInteger
+ IsUnsigned
+ IsFloat
+ IsComplex
+ IsString
+ IsUntyped
+
+ IsOrdered = IsInteger | IsFloat | IsString
+ IsNumeric = IsInteger | IsFloat | IsComplex
+ IsConstType = IsBoolean | IsNumeric | IsString
+)
+
+// A Basic represents a basic type.
+type Basic struct {
+ kind BasicKind
+ info BasicInfo
+ name string
+}
+
+// Kind returns the kind of basic type b.
+func (b *Basic) Kind() BasicKind { return b.kind }
+
+// Info returns information about properties of basic type b.
+func (b *Basic) Info() BasicInfo { return b.info }
+
+// Name returns the name of basic type b.
+func (b *Basic) Name() string { return b.name }
+
+func (b *Basic) Underlying() Type { return b }
+func (b *Basic) String() string { return TypeString(b, nil) }
diff --git a/src/cmd/compile/internal/types2/builtins.go b/src/cmd/compile/internal/types2/builtins.go
index f90e06f226..87295fe0e7 100644
--- a/src/cmd/compile/internal/types2/builtins.go
+++ b/src/cmd/compile/internal/types2/builtins.go
@@ -46,7 +46,7 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
default:
// make argument getter
xlist, _ := check.exprList(call.ArgList, false)
- arg = func(x *operand, i int) { *x = *xlist[i]; x.typ = expand(x.typ) }
+ arg = func(x *operand, i int) { *x = *xlist[i] }
nargs = len(xlist)
// evaluate first argument, if present
if nargs > 0 {
@@ -144,7 +144,7 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
mode := invalid
var typ Type
var val constant.Value
- switch typ = implicitArrayDeref(optype(x.typ)); t := typ.(type) {
+ switch typ = arrayPtrDeref(under(x.typ)); t := typ.(type) {
case *Basic:
if isString(t) && id == _Len {
if x.mode == constant_ {
@@ -178,9 +178,9 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
mode = value
}
- case *Sum:
- if t.is(func(t Type) bool {
- switch t := under(t).(type) {
+ case *TypeParam:
+ if t.underIs(func(t Type) bool {
+ switch t := arrayPtrDeref(t).(type) {
case *Basic:
if isString(t) && id == _Len {
return true
@@ -212,19 +212,23 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
case _Close:
// close(c)
- c := asChan(x.typ)
- if c == nil {
- check.errorf(x, invalidArg+"%s is not a channel", x)
- return
- }
- if c.dir == RecvOnly {
- check.errorf(x, invalidArg+"%s must not be a receive-only channel", x)
+ if !underIs(x.typ, func(u Type) bool {
+ uch, _ := u.(*Chan)
+ if uch == nil {
+ check.errorf(x, invalidOp+"cannot close non-channel %s", x)
+ return false
+ }
+ if uch.dir == RecvOnly {
+ check.errorf(x, invalidOp+"cannot close receive-only channel %s", x)
+ return false
+ }
+ return true
+ }) {
return
}
-
x.mode = novalue
if check.Types != nil {
- check.recordBuiltinType(call.Fun, makeSig(nil, c))
+ check.recordBuiltinType(call.Fun, makeSig(nil, x.typ))
}
case _Complex:
@@ -281,7 +285,7 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
}
// both argument types must be identical
- if !check.identical(x.typ, y.typ) {
+ if !Identical(x.typ, y.typ) {
check.errorf(x, invalidOp+"%v (mismatched types %s and %s)", call, x.typ, y.typ)
return
}
@@ -332,13 +336,15 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
return
}
var src Type
- switch t := optype(y.typ).(type) {
+ switch t := under(y.typ).(type) {
case *Basic:
if isString(y.typ) {
src = universeByte
}
case *Slice:
src = t.elem
+ case *TypeParam:
+ check.error(x, "copy on generic operands not yet implemented")
}
if dst == nil || src == nil {
@@ -346,7 +352,7 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
return
}
- if !check.identical(dst, src) {
+ if !Identical(dst, src) {
check.errorf(x, invalidArg+"arguments to copy %s and %s have different element types %s and %s", x, &y, dst, src)
return
}
@@ -358,25 +364,40 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
x.typ = Typ[Int]
case _Delete:
- // delete(m, k)
- m := asMap(x.typ)
- if m == nil {
- check.errorf(x, invalidArg+"%s is not a map", x)
+ // delete(map_, key)
+ // map_ must be a map type or a type parameter describing map types.
+ // The key cannot be a type parameter for now.
+ map_ := x.typ
+ var key Type
+ if !underIs(map_, func(u Type) bool {
+ map_, _ := u.(*Map)
+ if map_ == nil {
+ check.errorf(x, invalidArg+"%s is not a map", x)
+ return false
+ }
+ if key != nil && !Identical(map_.key, key) {
+ check.errorf(x, invalidArg+"maps of %s must have identical key types", x)
+ return false
+ }
+ key = map_.key
+ return true
+ }) {
return
}
+
arg(x, 1) // k
if x.mode == invalid {
return
}
- check.assignment(x, m.key, "argument to delete")
+ check.assignment(x, key, "argument to delete")
if x.mode == invalid {
return
}
x.mode = novalue
if check.Types != nil {
- check.recordBuiltinType(call.Fun, makeSig(nil, m, m.key))
+ check.recordBuiltinType(call.Fun, makeSig(nil, map_, key))
}
case _Imag, _Real:
@@ -451,39 +472,21 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
return
}
- min, max := -1, 10
- var valid func(t Type) bool
- valid = func(t Type) bool {
- var m int
- switch t := optype(t).(type) {
- case *Slice:
- m = 2
- case *Map, *Chan:
- m = 1
- case *Sum:
- return t.is(valid)
- default:
- return false
- }
- if m > min {
- min = m
- }
- if m+1 < max {
- max = m + 1
- }
- return true
- }
-
- if !valid(T) {
+ var min int // minimum number of arguments
+ switch optype(T).(type) {
+ case *Slice:
+ min = 2
+ case *Map, *Chan:
+ min = 1
+ case *top:
+ check.errorf(arg0, invalidArg+"cannot make %s; type parameter has no structural type", arg0)
+ return
+ default:
check.errorf(arg0, invalidArg+"cannot make %s; type must be slice, map, or channel", arg0)
return
}
- if nargs < min || max < nargs {
- if min == max {
- check.errorf(call, "%v expects %d arguments; found %d", call, min, nargs)
- } else {
- check.errorf(call, "%v expects %d or %d arguments; found %d", call, min, max, nargs)
- }
+ if nargs < min || min+1 < nargs {
+ check.errorf(call, invalidOp+"%v expects %d or %d arguments; found %d", call, min, min+1, nargs)
return
}
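// Illustrative sketch (not part of this diff): make works with a type
// parameter whose constraint has a single structural type; a mixed union such
// as ~[]int | ~map[string]int would be rejected with the "no structural type"
// error added above. Assumes Go 1.18+; emptyLike is an invented name.
package main

import "fmt"

func emptyLike[S ~[]E, E any](n int) S {
	return make(S, 0, n) // S's structural type is []E
}

func main() {
	s := emptyLike[[]int](8) // E is inferred from S via constraint inference
	fmt.Println(len(s), cap(s)) // 0 8
}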
@@ -603,19 +606,22 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
case _Alignof:
// unsafe.Alignof(x T) uintptr
- if asTypeParam(x.typ) != nil {
- check.errorf(call, invalidOp+"unsafe.Alignof undefined for %s", x)
- return
- }
check.assignment(x, nil, "argument to unsafe.Alignof")
if x.mode == invalid {
return
}
- x.mode = constant_
- x.val = constant.MakeInt64(check.conf.alignof(x.typ))
+ if hasVarSize(x.typ) {
+ x.mode = value
+ if check.Types != nil {
+ check.recordBuiltinType(call.Fun, makeSig(Typ[Uintptr], x.typ))
+ }
+ } else {
+ x.mode = constant_
+ x.val = constant.MakeInt64(check.conf.alignof(x.typ))
+ // result is constant - no need to record signature
+ }
x.typ = Typ[Uintptr]
- // result is constant - no need to record signature
case _Offsetof:
// unsafe.Offsetof(x T) uintptr, where x must be a selector
@@ -635,7 +641,7 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
base := derefStructPtr(x.typ)
sel := selx.Sel.Value
- obj, index, indirect := check.lookupFieldOrMethod(base, false, check.pkg, sel)
+ obj, index, indirect := LookupFieldOrMethod(base, false, check.pkg, sel)
switch obj.(type) {
case nil:
check.errorf(x, invalidArg+"%s has no single field %s", base, sel)
@@ -653,30 +659,52 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
return
}
- // TODO(gri) Should we pass x.typ instead of base (and indirect report if derefStructPtr indirected)?
+ // TODO(gri) Should we pass x.typ instead of base (and have indirect report if derefStructPtr indirected)?
check.recordSelection(selx, FieldVal, base, obj, index, false)
- offs := check.conf.offsetof(base, index)
- x.mode = constant_
- x.val = constant.MakeInt64(offs)
+ // record the selector expression (was bug - issue #47895)
+ {
+ mode := value
+ if x.mode == variable || indirect {
+ mode = variable
+ }
+ check.record(&operand{mode, selx, obj.Type(), nil, 0})
+ }
+
+ // The field offset is considered a variable even if the field is declared before
+ // the part of the struct which is variable-sized. This makes both the rules
+ // simpler and also permits (or at least doesn't prevent) a compiler from re-
+ // arranging struct fields if it wanted to.
+ if hasVarSize(base) {
+ x.mode = value
+ if check.Types != nil {
+ check.recordBuiltinType(call.Fun, makeSig(Typ[Uintptr], obj.Type()))
+ }
+ } else {
+ x.mode = constant_
+ x.val = constant.MakeInt64(check.conf.offsetof(base, index))
+ // result is constant - no need to record signature
+ }
x.typ = Typ[Uintptr]
- // result is constant - no need to record signature
case _Sizeof:
// unsafe.Sizeof(x T) uintptr
- if asTypeParam(x.typ) != nil {
- check.errorf(call, invalidOp+"unsafe.Sizeof undefined for %s", x)
- return
- }
check.assignment(x, nil, "argument to unsafe.Sizeof")
if x.mode == invalid {
return
}
- x.mode = constant_
- x.val = constant.MakeInt64(check.conf.sizeof(x.typ))
+ if hasVarSize(x.typ) {
+ x.mode = value
+ if check.Types != nil {
+ check.recordBuiltinType(call.Fun, makeSig(Typ[Uintptr], x.typ))
+ }
+ } else {
+ x.mode = constant_
+ x.val = constant.MakeInt64(check.conf.sizeof(x.typ))
+ // result is constant - no need to record signature
+ }
x.typ = Typ[Uintptr]
- // result is constant - no need to record signature
case _Slice:
// unsafe.Slice(ptr *T, len IntegerType) []T
@@ -748,6 +776,25 @@ func (check *Checker) builtin(x *operand, call *syntax.CallExpr, id builtinId) (
return true
}
+// hasVarSize reports if the size of type t is variable due to type parameters.
+func hasVarSize(t Type) bool {
+ switch t := under(t).(type) {
+ case *Array:
+ return hasVarSize(t.elem)
+ case *Struct:
+ for _, f := range t.fields {
+ if hasVarSize(f.typ) {
+ return true
+ }
+ }
+ case *TypeParam:
+ return true
+ case *Named, *Union, *top:
+ unreachable()
+ }
+ return false
+}
+
// applyTypeFunc applies f to x. If x is a type parameter,
// the result is a type parameter constrained by a new
// interface bound. The type bounds for that interface
@@ -759,10 +806,10 @@ func (check *Checker) applyTypeFunc(f func(Type) Type, x Type) Type {
if tp := asTypeParam(x); tp != nil {
// Test if t satisfies the requirements for the argument
// type and collect possible result types at the same time.
- var rtypes []Type
- if !tp.Bound().is(func(x Type) bool {
- if r := f(x); r != nil {
- rtypes = append(rtypes, r)
+ var terms []*Term
+ if !tp.iface().typeSet().is(func(t *term) bool {
+ if r := f(t.typ); r != nil {
+ terms = append(terms, NewTerm(t.tilde, r))
return true
}
return false
@@ -775,11 +822,12 @@ func (check *Checker) applyTypeFunc(f func(Type) Type, x Type) Type {
// uses of real() where the result is used to
// define type and initialize a variable?
- // construct a suitable new type parameter
- tpar := NewTypeName(nopos, nil /* = Universe pkg */, "<type parameter>", nil)
- ptyp := check.NewTypeParam(tpar, 0, &emptyInterface) // assigns type to tpar as a side-effect
- tsum := NewSum(rtypes)
- ptyp.bound = &Interface{types: tsum, allMethods: markComplete, allTypes: tsum}
+ // Construct a suitable new type parameter for the sum type. The
+ // type param is placed in the current package so export/import
+ // works as expected.
+ tpar := NewTypeName(nopos, check.pkg, "<type parameter>", nil)
+ ptyp := check.NewTypeParam(tpar, NewInterfaceType(nil, []Type{NewUnion(terms)})) // assigns type to tpar as a side-effect
+ ptyp.index = tp.index
return ptyp
}
@@ -803,10 +851,9 @@ func makeSig(res Type, args ...Type) *Signature {
return &Signature{params: params, results: result}
}
-// implicitArrayDeref returns A if typ is of the form *A and A is an array;
+// arrayPtrDeref returns A if typ is of the form *A and A is an array;
// otherwise it returns typ.
-//
-func implicitArrayDeref(typ Type) Type {
+func arrayPtrDeref(typ Type) Type {
if p, ok := typ.(*Pointer); ok {
if a := asArray(p.base); a != nil {
return a
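// Illustrative sketch (not part of this diff): with hasVarSize above,
// unsafe.Sizeof/Alignof/Offsetof applied to operands of variable size (such
// as values of type parameter type) yield a non-constant uintptr value
// instead of an untyped constant. Assumes Go 1.18+; size is an invented name.
package main

import (
	"fmt"
	"unsafe"
)

const fixed = unsafe.Sizeof(0) // operand has fixed size: still a constant

func size[T any](x T) uintptr {
	return unsafe.Sizeof(x) // T is a type parameter: non-constant result
}

func main() {
	fmt.Println(fixed, size(int32(0)), size("hello"))
}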
diff --git a/src/cmd/compile/internal/types2/builtins_test.go b/src/cmd/compile/internal/types2/builtins_test.go
index 82c786b86e..52dbba1cb9 100644
--- a/src/cmd/compile/internal/types2/builtins_test.go
+++ b/src/cmd/compile/internal/types2/builtins_test.go
@@ -7,6 +7,7 @@ package types2_test
import (
"cmd/compile/internal/syntax"
"fmt"
+ "strings"
"testing"
. "cmd/compile/internal/types2"
@@ -111,12 +112,15 @@ var builtinCalls = []struct {
{"Alignof", `_ = unsafe.Alignof(0)`, `invalid type`}, // constant
{"Alignof", `var x struct{}; _ = unsafe.Alignof(x)`, `invalid type`}, // constant
+ {"Alignof", `var x P; _ = unsafe.Alignof(x)`, `func(p.P₁) uintptr`},
{"Offsetof", `var x struct{f bool}; _ = unsafe.Offsetof(x.f)`, `invalid type`}, // constant
{"Offsetof", `var x struct{_ int; f bool}; _ = unsafe.Offsetof((&x).f)`, `invalid type`}, // constant
+ {"Offsetof", `var x struct{_ int; f P}; _ = unsafe.Offsetof((&x).f)`, `func(p.P₁) uintptr`},
{"Sizeof", `_ = unsafe.Sizeof(0)`, `invalid type`}, // constant
{"Sizeof", `var x struct{}; _ = unsafe.Sizeof(x)`, `invalid type`}, // constant
+ {"Sizeof", `var x P; _ = unsafe.Sizeof(x)`, `func(p.P₁) uintptr`},
{"Slice", `var p *int; _ = unsafe.Slice(p, 1)`, `func(*int, int) []int`},
{"Slice", `var p *byte; var n uintptr; _ = unsafe.Slice(p, n)`, `func(*byte, uintptr) []byte`},
@@ -149,9 +153,14 @@ func TestBuiltinSignatures(t *testing.T) {
}
}
+func parseGenericSrc(path, src string) (*syntax.File, error) {
+ errh := func(error) {} // dummy error handler so that parsing continues in presence of errors
+ return syntax.Parse(syntax.NewFileBase(path), strings.NewReader(src), errh, nil, syntax.AllowGenerics)
+}
+
func testBuiltinSignature(t *testing.T, name, src0, want string) {
- src := fmt.Sprintf(`package p; import "unsafe"; type _ unsafe.Pointer /* use unsafe */; func _() { %s }`, src0)
- f, err := parseSrc("", src)
+ src := fmt.Sprintf(`package p; import "unsafe"; type _ unsafe.Pointer /* use unsafe */; func _[P any]() { %s }`, src0)
+ f, err := parseGenericSrc("", src)
if err != nil {
t.Errorf("%s: %s", src0, err)
return
diff --git a/src/cmd/compile/internal/types2/call.go b/src/cmd/compile/internal/types2/call.go
index 6d149340b2..538fdc0fb7 100644
--- a/src/cmd/compile/internal/types2/call.go
+++ b/src/cmd/compile/internal/types2/call.go
@@ -26,7 +26,7 @@ func (check *Checker) funcInst(x *operand, inst *syntax.IndexExpr) {
// check number of type arguments (got) vs number of type parameters (want)
sig := x.typ.(*Signature)
- got, want := len(targs), len(sig.tparams)
+ got, want := len(targs), sig.TParams().Len()
if !useConstraintTypeInference && got != want || got > want {
check.errorf(xlist[got-1], "got %d type arguments but want %d", got, want)
x.mode = invalid
@@ -37,7 +37,7 @@ func (check *Checker) funcInst(x *operand, inst *syntax.IndexExpr) {
// if we don't have enough type arguments, try type inference
inferred := false
if got < want {
- targs = check.infer(inst.Pos(), sig.tparams, targs, nil, nil, true)
+ targs = check.infer(inst.Pos(), sig.TParams().list(), targs, nil, nil, true)
if targs == nil {
// error was already reported
x.mode = invalid
@@ -57,7 +57,7 @@ func (check *Checker) funcInst(x *operand, inst *syntax.IndexExpr) {
// instantiate function signature
res := check.instantiate(x.Pos(), sig, targs, poslist).(*Signature)
- assert(res.tparams == nil) // signature is not generic anymore
+ assert(res.TParams().Len() == 0) // signature is not generic anymore
if inferred {
check.recordInferred(inst, targs, res)
}
@@ -99,7 +99,6 @@ func (check *Checker) callExpr(x *operand, call *syntax.CallExpr) exprKind {
check.expr(x, call.ArgList[0])
if x.mode != invalid {
if t := asInterface(T); t != nil {
- check.completeInterface(nopos, t)
if t.IsConstraint() {
check.errorf(call, "cannot use interface %s in conversion (contains type list or is comparable)", T)
break
@@ -156,7 +155,7 @@ func (check *Checker) callExpr(x *operand, call *syntax.CallExpr) exprKind {
assert(len(targs) == len(xlist))
// check number of type arguments (got) vs number of type parameters (want)
- got, want := len(targs), len(sig.tparams)
+ got, want := len(targs), sig.TParams().Len()
if got > want {
check.errorf(xlist[want], "got %d type arguments but want %d", got, want)
check.use(call.ArgList...)
@@ -190,7 +189,7 @@ func (check *Checker) callExpr(x *operand, call *syntax.CallExpr) exprKind {
// if type inference failed, a parametrized result must be invalidated
// (operands cannot have a parametrized type)
- if x.mode == value && len(sig.tparams) > 0 && isParameterized(sig.tparams, x.typ) {
+ if x.mode == value && sig.TParams().Len() > 0 && isParameterized(sig.TParams().list(), x.typ) {
x.mode = invalid
}
@@ -318,24 +317,24 @@ func (check *Checker) arguments(call *syntax.CallExpr, sig *Signature, targs []T
}
// infer type arguments and instantiate signature if necessary
- if len(sig.tparams) > 0 {
+ if sig.TParams().Len() > 0 {
// TODO(gri) provide position information for targs so we can feed
// it to the instantiate call for better error reporting
- targs = check.infer(call.Pos(), sig.tparams, targs, sigParams, args, true)
+ targs := check.infer(call.Pos(), sig.TParams().list(), targs, sigParams, args, true)
if targs == nil {
return // error already reported
}
// compute result signature
rsig = check.instantiate(call.Pos(), sig, targs, nil).(*Signature)
- assert(rsig.tparams == nil) // signature is not generic anymore
+ assert(rsig.TParams().Len() == 0) // signature is not generic anymore
check.recordInferred(call, targs, rsig)
// Optimization: Only if the parameter list was adjusted do we
// need to compute it from the adjusted list; otherwise we can
// simply use the result signature's parameter list.
if adjusted {
- sigParams = check.subst(call.Pos(), sigParams, makeSubstMap(sig.tparams, targs)).(*Tuple)
+ sigParams = check.subst(call.Pos(), sigParams, makeSubstMap(sig.TParams().list(), targs), nil).(*Tuple)
} else {
sigParams = rsig.params
}
@@ -468,7 +467,7 @@ func (check *Checker) selector(x *operand, e *syntax.SelectorExpr) {
check.instantiatedOperand(x)
- obj, index, indirect = check.lookupFieldOrMethod(x.typ, x.mode == variable, check.pkg, sel)
+ obj, index, indirect = LookupFieldOrMethod(x.typ, x.mode == variable, check.pkg, sel)
if obj == nil {
switch {
case index != nil:
@@ -480,11 +479,10 @@ func (check *Checker) selector(x *operand, e *syntax.SelectorExpr) {
var why string
if tpar := asTypeParam(x.typ); tpar != nil {
// Type parameter bounds don't specify fields, so don't mention "field".
- switch obj := tpar.Bound().obj.(type) {
- case nil:
+ if tname := tpar.iface().obj; tname != nil {
+ why = check.sprintf("interface %s has no method %s", tname.name, sel)
+ } else {
why = check.sprintf("type bound for %s has no method %s", x.typ, sel)
- case *TypeName:
- why = check.sprintf("interface %s has no method %s", obj.name, sel)
}
} else {
why = check.sprintf("type %s has no field or method %s", x.typ, sel)
@@ -498,7 +496,7 @@ func (check *Checker) selector(x *operand, e *syntax.SelectorExpr) {
} else {
changeCase = string(unicode.ToUpper(r)) + sel[1:]
}
- if obj, _, _ = check.lookupFieldOrMethod(x.typ, x.mode == variable, check.pkg, changeCase); obj != nil {
+ if obj, _, _ = LookupFieldOrMethod(x.typ, x.mode == variable, check.pkg, changeCase); obj != nil {
why += ", but does have " + changeCase
}
}
@@ -518,7 +516,7 @@ func (check *Checker) selector(x *operand, e *syntax.SelectorExpr) {
// the signature accordingly.
// TODO(gri) factor this code out
sig := m.typ.(*Signature)
- if len(sig.rparams) > 0 {
+ if sig.RParams().Len() > 0 {
// For inference to work, we must use the receiver type
// matching the receiver in the actual method declaration.
// If the method is embedded, the matching receiver is the
@@ -547,7 +545,7 @@ func (check *Checker) selector(x *operand, e *syntax.SelectorExpr) {
// the receiver type arguments here, the receiver must be otherwise invalid
// and an error has been reported elsewhere.
arg := operand{mode: variable, expr: x.expr, typ: recv}
- targs := check.infer(m.pos, sig.rparams, nil, NewTuple(sig.recv), []*operand{&arg}, false /* no error reporting */)
+ targs := check.infer(m.pos, sig.RParams().list(), nil, NewTuple(sig.recv), []*operand{&arg}, false /* no error reporting */)
//check.dump("### inferred targs = %s", targs)
if targs == nil {
// We may reach here if there were other errors (see issue #40056).
@@ -557,7 +555,7 @@ func (check *Checker) selector(x *operand, e *syntax.SelectorExpr) {
// (If we modify m, some tests will fail; possibly because the m is in use.)
// TODO(gri) investigate and provide a correct explanation here
copy := *m
- copy.typ = check.subst(e.Pos(), m.typ, makeSubstMap(sig.rparams, targs))
+ copy.typ = check.subst(e.Pos(), m.typ, makeSubstMap(sig.RParams().list(), targs), nil)
obj = &copy
}
// TODO(gri) we also need to do substitution for parameterized interface methods
@@ -576,17 +574,37 @@ func (check *Checker) selector(x *operand, e *syntax.SelectorExpr) {
check.recordSelection(e, MethodExpr, x.typ, m, index, indirect)
- // the receiver type becomes the type of the first function
- // argument of the method expression's function type
- var params []*Var
sig := m.typ.(*Signature)
+ if sig.recv == nil {
+ check.error(e, "illegal cycle in method declaration")
+ goto Error
+ }
+
+ // The receiver type becomes the type of the first function
+ // argument of the method expression's function type.
+ var params []*Var
if sig.params != nil {
params = sig.params.vars
}
+ // Be consistent about named/unnamed parameters. This is not needed
+ // for type-checking, but the newly constructed signature may appear
+ // in an error message and then have mixed named/unnamed parameters.
+ // (An alternative would be to not print parameter names in errors,
+ // but it's useful to see them; this is cheap and method expressions
+ // are rare.)
+ name := ""
+ if len(params) > 0 && params[0].name != "" {
+ // name needed
+ name = sig.recv.name
+ if name == "" {
+ name = "_"
+ }
+ }
+ params = append([]*Var{NewVar(sig.recv.pos, sig.recv.pkg, name, x.typ)}, params...)
x.mode = value
x.typ = &Signature{
tparams: sig.tparams,
- params: NewTuple(append([]*Var{NewVar(nopos, check.pkg, "_", x.typ)}, params...)...),
+ params: NewTuple(params...),
results: sig.results,
variadic: sig.variadic,
}
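// Illustrative sketch (not part of this diff): the call-site machinery above
// (funcInst, arguments) covers both explicit instantiation of a generic
// function and inference of its type arguments from ordinary arguments.
// Assumes Go 1.18+; Number and Smaller are invented names.
package main

import "fmt"

type Number interface{ ~int | ~float64 }

func Smaller[T Number](a, b T) T {
	if a < b {
		return a
	}
	return b
}

func main() {
	fmt.Println(Smaller(3, 5)) // T inferred as int from the operands
	f := Smaller[float64]      // explicit instantiation: f is no longer generic
	fmt.Println(f(2.5, 1.5))
}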
diff --git a/src/cmd/compile/internal/types2/chan.go b/src/cmd/compile/internal/types2/chan.go
new file mode 100644
index 0000000000..77650dfb09
--- /dev/null
+++ b/src/cmd/compile/internal/types2/chan.go
@@ -0,0 +1,35 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+// A Chan represents a channel type.
+type Chan struct {
+ dir ChanDir
+ elem Type
+}
+
+// A ChanDir value indicates a channel direction.
+type ChanDir int
+
+// The direction of a channel is indicated by one of these constants.
+const (
+ SendRecv ChanDir = iota
+ SendOnly
+ RecvOnly
+)
+
+// NewChan returns a new channel type for the given direction and element type.
+func NewChan(dir ChanDir, elem Type) *Chan {
+ return &Chan{dir: dir, elem: elem}
+}
+
+// Dir returns the direction of channel c.
+func (c *Chan) Dir() ChanDir { return c.dir }
+
+// Elem returns the element type of channel c.
+func (c *Chan) Elem() Type { return c.elem }
+
+func (c *Chan) Underlying() Type { return c }
+func (c *Chan) String() string { return TypeString(c, nil) }
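// Illustrative sketch (not part of this diff): the exported go/types package
// has the same Chan API surface (NewChan, Dir, Elem, and the
// SendRecv/SendOnly/RecvOnly constants), so it can stand in for the internal
// types2 version added in this new file.
package main

import (
	"fmt"
	"go/types"
)

func main() {
	ch := types.NewChan(types.SendRecv, types.Typ[types.Int])
	fmt.Println(ch)                                    // chan int
	fmt.Println(ch.Dir() == types.SendRecv, ch.Elem()) // true int
}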
diff --git a/src/cmd/compile/internal/types2/check.go b/src/cmd/compile/internal/types2/check.go
index 8d6cd1edab..4226b4de82 100644
--- a/src/cmd/compile/internal/types2/check.go
+++ b/src/cmd/compile/internal/types2/check.go
@@ -71,7 +71,7 @@ type importKey struct {
// A dotImportKey describes a dot-imported object in the given scope.
type dotImportKey struct {
scope *Scope
- obj Object
+ name string
}
// A Checker maintains the state of the type checker.
@@ -82,11 +82,11 @@ type Checker struct {
conf *Config
pkg *Package
*Info
- version version // accepted language version
- objMap map[Object]*declInfo // maps package-level objects and (non-interface) methods to declaration info
- impMap map[importKey]*Package // maps (import path, source directory) to (complete or fake) package
- posMap map[*Interface][]syntax.Pos // maps interface types to lists of embedded interface positions
- typMap map[string]*Named // maps an instantiated named type hash to a *Named type
+ version version // accepted language version
+ nextID uint64 // unique Id for type parameters (first valid Id is 1)
+ objMap map[Object]*declInfo // maps package-level objects and (non-interface) methods to declaration info
+ impMap map[importKey]*Package // maps (import path, source directory) to (complete or fake) package
+ typMap map[string]*Named // maps an instantiated named type hash to a *Named type
// pkgPathMap maps package names to the set of distinct import paths we've
// seen for that name, anywhere in the import graph. It is used for
@@ -188,7 +188,6 @@ func NewChecker(conf *Config, pkg *Package, info *Info) *Checker {
version: version,
objMap: make(map[Object]*declInfo),
impMap: make(map[importKey]*Package),
- posMap: make(map[*Interface][]syntax.Pos),
typMap: make(map[string]*Named),
}
}
@@ -283,11 +282,6 @@ func (check *Checker) checkFiles(files []*syntax.File) (err error) {
print("== recordUntyped ==")
check.recordUntyped()
- if check.Info != nil {
- print("== sanitizeInfo ==")
- sanitizeInfo(check.Info)
- }
-
check.pkg.complete = true
// no longer needed - release memory
@@ -422,7 +416,7 @@ func (check *Checker) recordInferred(call syntax.Expr, targs []Type, sig *Signat
assert(call != nil)
assert(sig != nil)
if m := check.Inferred; m != nil {
- m[call] = Inferred{targs, sig}
+ m[call] = Inferred{NewTypeList(targs), sig}
}
}
diff --git a/src/cmd/compile/internal/types2/conversions.go b/src/cmd/compile/internal/types2/conversions.go
index 30201e2b7f..6c26a4c446 100644
--- a/src/cmd/compile/internal/types2/conversions.go
+++ b/src/cmd/compile/internal/types2/conversions.go
@@ -93,7 +93,7 @@ func (x *operand) convertibleTo(check *Checker, T Type) bool {
V := x.typ
Vu := under(V)
Tu := under(T)
- if check.identicalIgnoreTags(Vu, Tu) {
+ if IdenticalIgnoreTags(Vu, Tu) {
return true
}
@@ -101,7 +101,7 @@ func (x *operand) convertibleTo(check *Checker, T Type) bool {
// have identical underlying types if tags are ignored"
if V, ok := V.(*Pointer); ok {
if T, ok := T.(*Pointer); ok {
- if check.identicalIgnoreTags(under(V.base), under(T.base)) {
+ if IdenticalIgnoreTags(under(V.base), under(T.base)) {
return true
}
}
@@ -142,7 +142,7 @@ func (x *operand) convertibleTo(check *Checker, T Type) bool {
if s := asSlice(V); s != nil {
if p := asPointer(T); p != nil {
if a := asArray(p.Elem()); a != nil {
- if check.identical(s.Elem(), a.Elem()) {
+ if Identical(s.Elem(), a.Elem()) {
if check == nil || check.allowVersion(check.pkg, 1, 17) {
return true
}
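// Illustrative sketch (not part of this diff): the allowVersion(…, 1, 17)
// check above gates the slice-to-array-pointer conversion introduced in
// Go 1.17, which requires identical element types.
package main

import "fmt"

func main() {
	s := []int{1, 2, 3, 4}
	p := (*[4]int)(s) // valid since Go 1.17; panics if len(s) < 4
	p[0] = 42
	fmt.Println(s[0]) // 42: p aliases s's underlying array
}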
diff --git a/src/cmd/compile/internal/types2/decl.go b/src/cmd/compile/internal/types2/decl.go
index 1333e4c0ec..342e1090de 100644
--- a/src/cmd/compile/internal/types2/decl.go
+++ b/src/cmd/compile/internal/types2/decl.go
@@ -317,6 +317,8 @@ func (check *Checker) validType(typ Type, path []Object) typeInfo {
}
case *Named:
+ t.expand(check.typMap)
+
// don't touch the type if it is from a different package or the Universe scope
// (doing so would lead to a race condition - was issue #35049)
if t.obj.pkg != check.pkg {
@@ -338,7 +340,7 @@ func (check *Checker) validType(typ Type, path []Object) typeInfo {
// cycle detected
for i, tn := range path {
if t.obj.pkg != check.pkg {
- panic("internal error: type cycle via package-external type")
+ panic("type cycle via package-external type")
}
if tn == t.obj {
check.cycleError(path[i:])
@@ -346,12 +348,9 @@ func (check *Checker) validType(typ Type, path []Object) typeInfo {
return t.info
}
}
- panic("internal error: cycle start not found")
+ panic("cycle start not found")
}
return t.info
-
- case *instance:
- return check.validType(t.expand(), path)
}
return valid
@@ -515,75 +514,6 @@ func (check *Checker) varDecl(obj *Var, lhs []*Var, typ, init syntax.Expr) {
check.initVars(lhs, []syntax.Expr{init}, nopos)
}
-// under returns the expanded underlying type of n0; possibly by following
-// forward chains of named types. If an underlying type is found, resolve
-// the chain by setting the underlying type for each defined type in the
-// chain before returning it. If no underlying type is found or a cycle
-// is detected, the result is Typ[Invalid]. If a cycle is detected and
-// n0.check != nil, the cycle is reported.
-func (n0 *Named) under() Type {
- u := n0.underlying
- if u == nil {
- return Typ[Invalid]
- }
-
- // If the underlying type of a defined type is not a defined
- // type, then that is the desired underlying type.
- n := asNamed(u)
- if n == nil {
- return u // common case
- }
-
- // Otherwise, follow the forward chain.
- seen := map[*Named]int{n0: 0}
- path := []Object{n0.obj}
- for {
- u = n.underlying
- if u == nil {
- u = Typ[Invalid]
- break
- }
- n1 := asNamed(u)
- if n1 == nil {
- break // end of chain
- }
-
- seen[n] = len(seen)
- path = append(path, n.obj)
- n = n1
-
- if i, ok := seen[n]; ok {
- // cycle
- // TODO(gri) revert this to a method on Checker. Having a possibly
- // nil Checker on Named and TypeParam is too subtle.
- if n0.check != nil {
- n0.check.cycleError(path[i:])
- }
- u = Typ[Invalid]
- break
- }
- }
-
- for n := range seen {
- // We should never have to update the underlying type of an imported type;
- // those underlying types should have been resolved during the import.
- // Also, doing so would lead to a race condition (was issue #31749).
- // Do this check always, not just in debug mode (it's cheap).
- if n0.check != nil && n.obj.pkg != n0.check.pkg {
- panic("internal error: imported type with unresolved underlying type")
- }
- n.underlying = u
- }
-
- return u
-}
-
-func (n *Named) setUnderlying(typ Type) {
- if n != nil {
- n.underlying = typ
- }
-}
-
func (check *Checker) typeDecl(obj *TypeName, tdecl *syntax.TypeDecl, def *Named) {
assert(obj.typ == nil)
@@ -599,8 +529,8 @@ func (check *Checker) typeDecl(obj *TypeName, tdecl *syntax.TypeDecl, def *Named
alias = false
}
+ // alias declaration
if alias {
- // type alias declaration
if !check.allowVersion(check.pkg, 1, 9) {
if check.conf.CompilerErrorMessages {
check.error(tdecl, "type aliases only supported as of -lang=go1.9")
@@ -611,111 +541,94 @@ func (check *Checker) typeDecl(obj *TypeName, tdecl *syntax.TypeDecl, def *Named
obj.typ = Typ[Invalid]
obj.typ = check.anyType(tdecl.Type)
+ return
+ }
- } else {
- // defined type declaration
-
- named := check.newNamed(obj, nil, nil, nil, nil)
- def.setUnderlying(named)
-
- if tdecl.TParamList != nil {
- check.openScope(tdecl, "type parameters")
- defer check.closeScope()
- named.tparams = check.collectTypeParams(tdecl.TParamList)
- }
-
- // determine underlying type of named
- named.fromRHS = check.definedType(tdecl.Type, named)
+ // type definition or generic type declaration
+ named := check.newNamed(obj, nil, nil, nil, nil)
+ def.setUnderlying(named)
- // The underlying type of named may be itself a named type that is
- // incomplete:
- //
- // type (
- // A B
- // B *C
- // C A
- // )
- //
- // The type of C is the (named) type of A which is incomplete,
- // and which has as its underlying type the named type B.
- // Determine the (final, unnamed) underlying type by resolving
- // any forward chain.
- // TODO(gri) Investigate if we can just use named.fromRHS here
- // and rely on lazy computation of the underlying type.
- named.underlying = under(named)
+ if tdecl.TParamList != nil {
+ check.openScope(tdecl, "type parameters")
+ defer check.closeScope()
+ named.tparams = check.collectTypeParams(tdecl.TParamList)
}
+ // determine underlying type of named
+ named.fromRHS = check.definedType(tdecl.Type, named)
+ assert(named.fromRHS != nil)
+ // The underlying type of named may be itself a named type that is
+ // incomplete:
+ //
+ // type (
+ // A B
+ // B *C
+ // C A
+ // )
+ //
+ // The type of C is the (named) type of A which is incomplete,
+ // and which has as its underlying type the named type B.
+ // Determine the (final, unnamed) underlying type by resolving
+ // any forward chain.
+ // TODO(gri) Investigate if we can just use named.fromRHS here
+ // and rely on lazy computation of the underlying type.
+ named.underlying = under(named)
+
+ // If the RHS is a type parameter, it must be from this type declaration.
+ if tpar, _ := named.underlying.(*TypeParam); tpar != nil && tparamIndex(named.TParams().list(), tpar) < 0 {
+ check.errorf(tdecl.Type, "cannot use function type parameter %s as RHS in type declaration", tpar)
+ named.underlying = Typ[Invalid]
+ }
}
-func (check *Checker) collectTypeParams(list []*syntax.Field) (tparams []*TypeName) {
- // Type parameter lists should not be empty. The parser will
- // complain but we still may get an incorrect AST: ignore it.
- if len(list) == 0 {
- return
- }
+func (check *Checker) collectTypeParams(list []*syntax.Field) *TParamList {
+ tparams := make([]*TypeParam, len(list))
- // Declare type parameters up-front, with empty interface as type bound.
+ // Declare type parameters up-front.
// The scope of type parameters starts at the beginning of the type parameter
- // list (so we can have mutually recursive parameterized interfaces).
- for _, f := range list {
- tparams = check.declareTypeParam(tparams, f.Name)
+ // list (so we can have mutually recursive parameterized type bounds).
+ for i, f := range list {
+ tparams[i] = check.declareTypeParam(f.Name)
}
var bound Type
- for i, j := 0, 0; i < len(list); i = j {
- f := list[i]
-
- // determine the range of type parameters list[i:j] with identical type bound
- // (declared as in (type a, b, c B))
- j = i + 1
- for j < len(list) && list[j].Type == f.Type {
- j++
- }
-
- // this should never be the case, but be careful
- if f.Type == nil {
- continue
- }
-
- // The predeclared identifier "any" is visible only as a constraint
- // in a type parameter list. Look for it before general constraint
- // resolution.
- if tident, _ := unparen(f.Type).(*syntax.Name); tident != nil && tident.Value == "any" && check.lookup("any") == nil {
- bound = universeAny
- } else {
- bound = check.typ(f.Type)
- }
-
- // type bound must be an interface
- // TODO(gri) We should delay the interface check because
- // we may not have a complete interface yet:
- // type C(type T C) interface {}
- // (issue #39724).
- if _, ok := under(bound).(*Interface); ok {
- // set the type bounds
- for i < j {
- tparams[i].typ.(*TypeParam).bound = bound
- i++
- }
- } else if bound != Typ[Invalid] {
- check.errorf(f.Type, "%s is not an interface", bound)
+ for i, f := range list {
+ // Optimization: Re-use the previous type bound if it hasn't changed.
+ // This also preserves the grouped output of type parameter lists
+ // when printing type strings.
+ if i == 0 || f.Type != list[i-1].Type {
+ bound = check.boundType(f.Type)
}
+ tparams[i].bound = bound
}
- return
+ return bindTParams(tparams)
}
-func (check *Checker) declareTypeParam(tparams []*TypeName, name *syntax.Name) []*TypeName {
- tpar := NewTypeName(name.Pos(), check.pkg, name.Value, nil)
- check.NewTypeParam(tpar, len(tparams), &emptyInterface) // assigns type to tpar as a side-effect
- check.declare(check.scope, name, tpar, check.scope.pos) // TODO(gri) check scope position
- tparams = append(tparams, tpar)
+func (check *Checker) declareTypeParam(name *syntax.Name) *TypeParam {
+ tname := NewTypeName(name.Pos(), check.pkg, name.Value, nil)
+ tpar := check.NewTypeParam(tname, nil) // assigns type to tname as a side-effect
+ check.declare(check.scope, name, tname, check.scope.pos) // TODO(gri) check scope position
+ return tpar
+}
- if check.conf.Trace {
- check.trace(name.Pos(), "type param = %v", tparams[len(tparams)-1])
+// boundType type-checks the type expression e and returns its type, or Typ[Invalid].
+// The type must be an interface, including the predeclared type "any".
+func (check *Checker) boundType(e syntax.Expr) Type {
+ // The predeclared identifier "any" is visible only as a type bound in a type parameter list.
+ // If we allow "any" for general use, this if-statement can be removed (issue #33232).
+ if name, _ := unparen(e).(*syntax.Name); name != nil && name.Value == "any" && check.lookup("any") == universeAny {
+ return universeAny.Type()
}
- return tparams
+ bound := check.typ(e)
+ check.later(func() {
+ u := under(bound)
+ if _, ok := u.(*Interface); !ok && u != Typ[Invalid] {
+ check.errorf(e, "%s is not an interface", bound)
+ }
+ })
+ return bound
}
func (check *Checker) collectMethods(obj *TypeName) {
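// Illustrative sketch (not part of this diff): grouped type parameters share
// one bound (matching the "re-use the previous type bound" optimization
// above), and boundType defers a check that each bound's underlying type is
// an interface, with the predeclared "any" handled specially. Assumes
// Go 1.18+; Pair and Keys are invented names.
package p

type Pair[K, V any] struct { // K and V share the bound any
	Key   K
	Value V
}

func Keys[K comparable, V any](m map[K]V) []K {
	r := make([]K, 0, len(m))
	for k := range m {
		r = append(r, k)
	}
	return r
}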
@@ -737,7 +650,8 @@ func (check *Checker) collectMethods(obj *TypeName) {
// and field names must be distinct."
base := asNamed(obj.typ) // shouldn't fail but be conservative
if base != nil {
- if t, _ := base.underlying.(*Struct); t != nil {
+ u := safeUnderlying(base) // base should be expanded, but use safeUnderlying to be conservative
+ if t, _ := u.(*Struct); t != nil {
for _, fld := range t.fields {
if fld.name != "_" {
assert(mset.insert(fld) == nil)
@@ -779,6 +693,7 @@ func (check *Checker) collectMethods(obj *TypeName) {
}
if base != nil {
+ base.load() // TODO(mdempsky): Probably unnecessary.
base.methods = append(base.methods, m)
}
}
@@ -805,6 +720,10 @@ func (check *Checker) funcDecl(obj *Func, decl *declInfo) {
check.funcType(sig, fdecl.Recv, fdecl.TParamList, fdecl.Type)
obj.color_ = saved
+ if len(fdecl.TParamList) > 0 && fdecl.Body == nil {
+ check.softErrorf(fdecl, "parameterized function is missing function body")
+ }
+
// function body must be type-checked after global declarations
// (functions implemented elsewhere have no body)
if !check.conf.IgnoreFuncBodies && fdecl.Body != nil {
diff --git a/src/cmd/compile/internal/types2/errorcalls_test.go b/src/cmd/compile/internal/types2/errorcalls_test.go
index 28bb33aaff..80b05f9f0f 100644
--- a/src/cmd/compile/internal/types2/errorcalls_test.go
+++ b/src/cmd/compile/internal/types2/errorcalls_test.go
@@ -18,7 +18,7 @@ func TestErrorCalls(t *testing.T) {
}
for _, file := range files {
- syntax.Walk(file, func(n syntax.Node) bool {
+ syntax.Crawl(file, func(n syntax.Node) bool {
call, _ := n.(*syntax.CallExpr)
if call == nil {
return false
diff --git a/src/cmd/compile/internal/types2/errors.go b/src/cmd/compile/internal/types2/errors.go
index af4ecb2300..a68273271b 100644
--- a/src/cmd/compile/internal/types2/errors.go
+++ b/src/cmd/compile/internal/types2/errors.go
@@ -88,7 +88,7 @@ func sprintf(qf Qualifier, format string, args ...interface{}) string {
case nil:
arg = "<nil>"
case operand:
- panic("internal error: should always pass *operand")
+ panic("got operand instead of *operand")
case *operand:
arg = operandString(a, qf)
case syntax.Pos:
@@ -111,7 +111,7 @@ func (check *Checker) qualifier(pkg *Package) string {
if check.pkgPathMap == nil {
check.pkgPathMap = make(map[string]map[string]bool)
check.seenPkgMap = make(map[*Package]bool)
- check.markImports(pkg)
+ check.markImports(check.pkg)
}
// If the same package name was used by multiple packages, display the full path.
if len(check.pkgPathMap[pkg.name]) > 1 {
@@ -148,7 +148,7 @@ func (check *Checker) sprintf(format string, args ...interface{}) string {
func (check *Checker) report(err *error_) {
if err.empty() {
- panic("internal error: reporting no error")
+ panic("no error to report")
}
check.err(err.pos(), err.msg(check.qualifier), err.soft)
}
diff --git a/src/cmd/compile/internal/types2/expr.go b/src/cmd/compile/internal/types2/expr.go
index 23b79656bb..d108093dac 100644
--- a/src/cmd/compile/internal/types2/expr.go
+++ b/src/cmd/compile/internal/types2/expr.go
@@ -157,6 +157,14 @@ var op2str2 = [...]string{
syntax.Shl: "shift",
}
+func underIs(typ Type, f func(Type) bool) bool {
+ u := under(typ)
+ if tpar, _ := u.(*TypeParam); tpar != nil {
+ return tpar.underIs(f)
+ }
+ return f(u)
+}
+
func (check *Checker) unary(x *operand, e *syntax.Operation) {
check.expr(x, e.X)
if x.mode == invalid {
@@ -177,19 +185,29 @@ func (check *Checker) unary(x *operand, e *syntax.Operation) {
return
case syntax.Recv:
- typ := asChan(x.typ)
- if typ == nil {
- check.errorf(x, invalidOp+"cannot receive from non-channel %s", x)
- x.mode = invalid
- return
- }
- if typ.dir == SendOnly {
- check.errorf(x, invalidOp+"cannot receive from send-only channel %s", x)
+ var elem Type
+ if !underIs(x.typ, func(u Type) bool {
+ ch, _ := u.(*Chan)
+ if ch == nil {
+ check.errorf(x, invalidOp+"cannot receive from non-channel %s", x)
+ return false
+ }
+ if ch.dir == SendOnly {
+ check.errorf(x, invalidOp+"cannot receive from send-only channel %s", x)
+ return false
+ }
+ if elem != nil && !Identical(ch.elem, elem) {
+ check.errorf(x, invalidOp+"channels of %s must have the same element type", x)
+ return false
+ }
+ elem = ch.elem
+ return true
+ }) {
x.mode = invalid
return
}
x.mode = commaok
- x.typ = typ.elem
+ x.typ = elem
check.hasCallOrRecv = true
return
}
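// Illustrative sketch (not part of this diff): the underIs check above allows
// receiving from a type parameter whose type set contains only channels that
// permit receives and share one element type. Assumes Go 1.18+; first is an
// invented name.
package main

import "fmt"

func first[C interface{ ~chan int | ~<-chan int }](c C) int {
	return <-c // every channel in C's type set has element type int
}

func main() {
	ch := make(chan int, 1)
	ch <- 7
	fmt.Println(first(ch)) // 7
}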
@@ -643,7 +661,7 @@ func (check *Checker) updateExprVal(x syntax.Expr, val constant.Value) {
func (check *Checker) convertUntyped(x *operand, target Type) {
newType, val, code := check.implicitTypeAndValue(x, target)
if code != 0 {
- check.invalidConversion(code, x, target.Underlying())
+ check.invalidConversion(code, x, safeUnderlying(target))
x.mode = invalid
return
}
@@ -664,7 +682,6 @@ func (check *Checker) convertUntyped(x *operand, target Type) {
// If x is a constant operand, the returned constant.Value will be the
// representation of x in this context.
func (check *Checker) implicitTypeAndValue(x *operand, target Type) (Type, constant.Value, errorCode) {
- target = expand(target)
if x.mode == invalid || isTyped(x.typ) || target == Typ[Invalid] {
return x.typ, nil, 0
}
@@ -691,7 +708,7 @@ func (check *Checker) implicitTypeAndValue(x *operand, target Type) (Type, const
return nil, nil, _InvalidUntypedConversion
}
- switch t := optype(target).(type) {
+ switch t := under(target).(type) {
case *Basic:
if x.mode == constant_ {
v, code := check.representation(x, t)
@@ -723,8 +740,8 @@ func (check *Checker) implicitTypeAndValue(x *operand, target Type) (Type, const
default:
return nil, nil, _InvalidUntypedConversion
}
- case *Sum:
- ok := t.is(func(t Type) bool {
+ case *TypeParam:
+ ok := t.underIs(func(t Type) bool {
target, _, _ := check.implicitTypeAndValue(x, t)
return target != nil
})
@@ -735,7 +752,6 @@ func (check *Checker) implicitTypeAndValue(x *operand, target Type) (Type, const
// Update operand types to the default type rather than the target
// (interface) type: values must have concrete dynamic types.
// Untyped nil was handled upfront.
- check.completeInterface(nopos, t)
if !t.Empty() {
return nil, nil, _InvalidUntypedConversion // cannot assign untyped values to non-empty interfaces
}
@@ -972,14 +988,28 @@ func (check *Checker) binary(x *operand, e syntax.Expr, lhs, rhs syntax.Expr, op
return
}
- check.convertUntyped(x, y.typ)
- if x.mode == invalid {
- return
+ canMix := func(x, y *operand) bool {
+ if IsInterface(x.typ) || IsInterface(y.typ) {
+ return true
+ }
+ if isBoolean(x.typ) != isBoolean(y.typ) {
+ return false
+ }
+ if isString(x.typ) != isString(y.typ) {
+ return false
+ }
+ return true
}
- check.convertUntyped(&y, x.typ)
- if y.mode == invalid {
- x.mode = invalid
- return
+ if canMix(x, &y) {
+ check.convertUntyped(x, y.typ)
+ if x.mode == invalid {
+ return
+ }
+ check.convertUntyped(&y, x.typ)
+ if y.mode == invalid {
+ x.mode = invalid
+ return
+ }
}
if isComparison(op) {
@@ -987,7 +1017,7 @@ func (check *Checker) binary(x *operand, e syntax.Expr, lhs, rhs syntax.Expr, op
return
}
- if !check.identical(x.typ, y.typ) {
+ if !Identical(x.typ, y.typ) {
// only report an error if we have valid types
// (otherwise we had an error reported elsewhere already)
if x.typ != Typ[Invalid] && y.typ != Typ[Invalid] {
@@ -1316,7 +1346,7 @@ func (check *Checker) exprInternal(x *operand, e syntax.Expr, hint Type) exprKin
xkey := keyVal(x.val)
if asInterface(utyp.key) != nil {
for _, vtyp := range visited[xkey] {
- if check.identical(vtyp, x.typ) {
+ if Identical(vtyp, x.typ) {
duplicate = true
break
}
@@ -1448,13 +1478,24 @@ func (check *Checker) exprInternal(x *operand, e syntax.Expr, hint Type) exprKin
case typexpr:
x.typ = &Pointer{base: x.typ}
default:
- if typ := asPointer(x.typ); typ != nil {
- x.mode = variable
- x.typ = typ.base
- } else {
- check.errorf(x, invalidOp+"cannot indirect %s", x)
+ var base Type
+ if !underIs(x.typ, func(u Type) bool {
+ p, _ := u.(*Pointer)
+ if p == nil {
+ check.errorf(x, invalidOp+"cannot indirect %s", x)
+ return false
+ }
+ if base != nil && !Identical(p.base, base) {
+ check.errorf(x, invalidOp+"pointers of %s must have identical base types", x)
+ return false
+ }
+ base = p.base
+ return true
+ }) {
goto Error
}
+ x.mode = variable
+ x.typ = base
}
break
}
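// Illustrative sketch (not part of this diff): indirection through a type
// parameter works when every type in its type set is a pointer with an
// identical base type, per the underIs check above. Assumes Go 1.18+; deref
// is an invented name.
package main

import "fmt"

func deref[T any, P interface{ *T }](p P) T {
	return *p
}

func main() {
	x := 42
	fmt.Println(deref(&x)) // 42; P and T are inferred as *int and int
}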
@@ -1537,7 +1578,7 @@ func (check *Checker) typeAssertion(pos syntax.Pos, x *operand, xtyp *Interface,
}
var msg string
if wrongType != nil {
- if check.identical(method.typ, wrongType.typ) {
+ if Identical(method.typ, wrongType.typ) {
msg = fmt.Sprintf("missing method %s (%s has pointer receiver)", method.name, method.name)
} else {
msg = fmt.Sprintf("wrong type for method %s (have %s, want %s)", method.name, wrongType.typ, method.typ)
diff --git a/src/cmd/compile/internal/types2/index.go b/src/cmd/compile/internal/types2/index.go
index c94017a8fb..e8755a1a68 100644
--- a/src/cmd/compile/internal/types2/index.go
+++ b/src/cmd/compile/internal/types2/index.go
@@ -32,7 +32,7 @@ func (check *Checker) indexExpr(x *operand, e *syntax.IndexExpr) (isFuncInst boo
return false
case value:
- if sig := asSignature(x.typ); sig != nil && len(sig.tparams) > 0 {
+ if sig := asSignature(x.typ); sig != nil && sig.TParams().Len() > 0 {
// function instantiation
return true
}
@@ -41,7 +41,7 @@ func (check *Checker) indexExpr(x *operand, e *syntax.IndexExpr) (isFuncInst boo
// ordinary index expression
valid := false
length := int64(-1) // valid if >= 0
- switch typ := optype(x.typ).(type) {
+ switch typ := under(x.typ).(type) {
case *Basic:
if isString(typ) {
valid = true
@@ -80,7 +80,7 @@ func (check *Checker) indexExpr(x *operand, e *syntax.IndexExpr) (isFuncInst boo
index := check.singleIndex(e)
if index == nil {
x.mode = invalid
- return
+ return false
}
var key operand
check.expr(&key, index)
@@ -89,87 +89,80 @@ func (check *Checker) indexExpr(x *operand, e *syntax.IndexExpr) (isFuncInst boo
x.mode = mapindex
x.typ = typ.elem
x.expr = e
- return
+ return false
- case *Sum:
- // A sum type can be indexed if all of the sum's types
- // support indexing and have the same index and element
- // type. Special rules apply for maps in the sum type.
- var tkey, telem Type // key is for map types only
- nmaps := 0 // number of map types in sum type
- if typ.is(func(t Type) bool {
- var e Type
- switch t := under(t).(type) {
+ case *TypeParam:
+ // TODO(gri) report detailed failure cause for better error messages
+ var tkey, telem Type // tkey != nil if we have maps
+ if typ.underIs(func(u Type) bool {
+ var key, elem Type
+ alen := int64(-1) // valid if >= 0
+ switch t := u.(type) {
case *Basic:
- if isString(t) {
- e = universeByte
+ if !isString(t) {
+ return false
}
+ elem = universeByte
case *Array:
- e = t.elem
+ elem = t.elem
+ alen = t.len
case *Pointer:
- if t := asArray(t.base); t != nil {
- e = t.elem
+ a, _ := under(t.base).(*Array)
+ if a == nil {
+ return false
}
+ elem = a.elem
+ alen = a.len
case *Slice:
- e = t.elem
+ elem = t.elem
case *Map:
- // If there are multiple maps in the sum type,
- // they must have identical key types.
- // TODO(gri) We may be able to relax this rule
- // but it becomes complicated very quickly.
- if tkey != nil && !Identical(t.key, tkey) {
+ key = t.key
+ elem = t.elem
+ default:
+ return false
+ }
+ assert(elem != nil)
+ if telem == nil {
+ // first type
+ tkey, telem = key, elem
+ length = alen
+ } else {
+ // all map keys must be identical (incl. all nil)
+ if !Identical(key, tkey) {
return false
}
- tkey = t.key
- e = t.elem
- nmaps++
- case *TypeParam:
- check.errorf(x, "type of %s contains a type parameter - cannot index (implementation restriction)", x)
- case *instance:
- panic("unimplemented")
- }
- if e == nil || telem != nil && !Identical(e, telem) {
- return false
+ // all element types must be identical
+ if !Identical(elem, telem) {
+ return false
+ }
+ tkey, telem = key, elem
+ // track the minimal length for arrays
+ if alen >= 0 && alen < length {
+ length = alen
+ }
}
- telem = e
return true
}) {
- // If there are maps, the index expression must be assignable
- // to the map key type (as for simple map index expressions).
- if nmaps > 0 {
+ // For maps, the index expression must be assignable to the map key type.
+ if tkey != nil {
index := check.singleIndex(e)
if index == nil {
x.mode = invalid
- return
+ return false
}
var key operand
check.expr(&key, index)
check.assignment(&key, tkey, "map index")
// ok to continue even if indexing failed - map element type is known
-
- // If there are only maps, we are done.
- if nmaps == len(typ.types) {
- x.mode = mapindex
- x.typ = telem
- x.expr = e
- return
- }
-
- // Otherwise we have mix of maps and other types. For
- // now we require that the map key be an integer type.
- // TODO(gri) This is probably not good enough.
- valid = isInteger(tkey)
- // avoid 2nd indexing error if indexing failed above
- if !valid && key.mode == invalid {
- x.mode = invalid
- return
- }
- x.mode = value // map index expressions are not addressable
- } else {
- // no maps
- valid = true
- x.mode = variable
+ x.mode = mapindex
+ x.typ = telem
+ x.expr = e
+ return false
}
+
+ // no maps
+ valid = true
+ x.mode = variable
x.typ = telem
}
}
@@ -177,13 +170,13 @@ func (check *Checker) indexExpr(x *operand, e *syntax.IndexExpr) (isFuncInst boo
if !valid {
check.errorf(x, invalidOp+"cannot index %s", x)
x.mode = invalid
- return
+ return false
}
index := check.singleIndex(e)
if index == nil {
x.mode = invalid
- return
+ return false
}
// In pathological (invalid) cases (e.g.: type T1 [][[]T1{}[0][0]]T0)
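// Illustrative sketch (not part of this diff): an operand of type parameter
// type may be indexed when all types in its type set have identical element
// types, as the rewritten case above requires. Assumes Go 1.18+; at is an
// invented name.
package main

import "fmt"

func at[T ~[]byte | ~string](s T, i int) byte {
	return s[i] // []byte and string both have element type byte
}

func main() {
	fmt.Println(at("hello", 1), at([]byte{10, 20, 30}, 2)) // 101 30
}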
@@ -206,7 +199,7 @@ func (check *Checker) sliceExpr(x *operand, e *syntax.SliceExpr) {
valid := false
length := int64(-1) // valid if >= 0
- switch typ := optype(x.typ).(type) {
+ switch typ := under(x.typ).(type) {
case *Basic:
if isString(typ) {
if e.Full {
@@ -246,7 +239,7 @@ func (check *Checker) sliceExpr(x *operand, e *syntax.SliceExpr) {
valid = true
// x.typ doesn't change
- case *Sum, *TypeParam:
+ case *TypeParam:
check.error(x, "generic slice expressions not yet implemented")
x.mode = invalid
return
diff --git a/src/cmd/compile/internal/types2/infer.go b/src/cmd/compile/internal/types2/infer.go
index f37d7f6477..5badecc070 100644
--- a/src/cmd/compile/internal/types2/infer.go
+++ b/src/cmd/compile/internal/types2/infer.go
@@ -28,7 +28,7 @@ const useConstraintTypeInference = true
//
// Constraint type inference is used after each step to expand the set of type arguments.
//
-func (check *Checker) infer(pos syntax.Pos, tparams []*TypeName, targs []Type, params *Tuple, args []*operand, report bool) (result []Type) {
+func (check *Checker) infer(pos syntax.Pos, tparams []*TypeParam, targs []Type, params *Tuple, args []*operand, report bool) (result []Type) {
if debug {
defer func() {
assert(result == nil || len(result) == len(tparams))
@@ -83,18 +83,18 @@ func (check *Checker) infer(pos syntax.Pos, tparams []*TypeName, targs []Type, p
// Substitute type arguments for their respective type parameters in params,
// if any. Note that nil targs entries are ignored by check.subst.
- // TODO(gri) Can we avoid this (we're setting known type argumemts below,
+ // TODO(gri) Can we avoid this (we're setting known type arguments below,
// but that doesn't impact the isParameterized check for now).
if params.Len() > 0 {
smap := makeSubstMap(tparams, targs)
- params = check.subst(nopos, params, smap).(*Tuple)
+ params = check.subst(nopos, params, smap, nil).(*Tuple)
}
// --- 2 ---
// Unify parameter and argument types for generic parameters with typed arguments
// and collect the indices of generic parameters with untyped arguments.
// Terminology: generic parameter = function parameter with a type-parameterized type
- u := newUnifier(check, false)
+ u := newUnifier(false)
u.x.init(tparams)
// Set the type arguments which we know already.
@@ -122,12 +122,12 @@ func (check *Checker) infer(pos syntax.Pos, tparams []*TypeName, targs []Type, p
}
}
if allFailed {
- check.errorf(arg, "%s %s of %s does not match %s (cannot infer %s)", kind, targ, arg.expr, tpar, typeNamesString(tparams))
+ check.errorf(arg, "%s %s of %s does not match %s (cannot infer %s)", kind, targ, arg.expr, tpar, typeParamsString(tparams))
return
}
}
smap := makeSubstMap(tparams, targs)
- inferred := check.subst(arg.Pos(), tpar, smap)
+ inferred := check.subst(arg.Pos(), tpar, smap, nil)
if inferred != tpar {
check.errorf(arg, "%s %s of %s does not match inferred type %s for %s", kind, targ, arg.expr, inferred, tpar)
} else {
@@ -222,23 +222,23 @@ func (check *Checker) infer(pos syntax.Pos, tparams []*TypeName, targs []Type, p
assert(targs != nil && index >= 0 && targs[index] == nil)
tpar := tparams[index]
if report {
- check.errorf(pos, "cannot infer %s (%s) (%s)", tpar.name, tpar.pos, targs)
+ check.errorf(pos, "cannot infer %s (%s) (%s)", tpar.obj.name, tpar.obj.pos, targs)
}
return nil
}
-// typeNamesString produces a string containing all the
-// type names in list suitable for human consumption.
-func typeNamesString(list []*TypeName) string {
+// typeParamsString produces a string of the type parameter names
+// in list suitable for human consumption.
+func typeParamsString(list []*TypeParam) string {
// common cases
n := len(list)
switch n {
case 0:
return ""
case 1:
- return list[0].name
+ return list[0].obj.name
case 2:
- return list[0].name + " and " + list[1].name
+ return list[0].obj.name + " and " + list[1].obj.name
}
// general case (n > 2)
@@ -248,15 +248,15 @@ func typeNamesString(list []*TypeName) string {
if i > 0 {
b.WriteString(", ")
}
- b.WriteString(tname.name)
+ b.WriteString(tname.obj.name)
}
b.WriteString(", and ")
- b.WriteString(list[n-1].name)
+ b.WriteString(list[n-1].obj.name)
return b.String()
}
// IsParameterized reports whether typ contains any of the type parameters of tparams.
-func isParameterized(tparams []*TypeName, typ Type) bool {
+func isParameterized(tparams []*TypeParam, typ Type) bool {
w := tpWalker{
seen: make(map[Type]bool),
tparams: tparams,
@@ -266,7 +266,7 @@ func isParameterized(tparams []*TypeName, typ Type) bool {
type tpWalker struct {
seen map[Type]bool
- tparams []*TypeName
+ tparams []*TypeParam
}
func (w *tpWalker) isParameterized(typ Type) (res bool) {
@@ -280,7 +280,7 @@ func (w *tpWalker) isParameterized(typ Type) (res bool) {
}()
switch t := typ.(type) {
- case nil, *Basic: // TODO(gri) should nil be handled here?
+ case nil, *top, *Basic: // TODO(gri) should nil be handled here?
break
case *Array:
@@ -307,9 +307,6 @@ func (w *tpWalker) isParameterized(typ Type) (res bool) {
}
}
- case *Sum:
- return w.isParameterizedList(t.types)
-
case *Signature:
// t.tparams may not be nil if we are looking at a signature
// of a generic function type (or an interface method) that is
@@ -321,24 +318,15 @@ func (w *tpWalker) isParameterized(typ Type) (res bool) {
return w.isParameterized(t.params) || w.isParameterized(t.results)
case *Interface:
- if t.allMethods != nil {
- // interface is complete - quick test
- for _, m := range t.allMethods {
- if w.isParameterized(m.typ) {
- return true
- }
+ tset := t.typeSet()
+ for _, m := range tset.methods {
+ if w.isParameterized(m.typ) {
+ return true
}
- return w.isParameterizedList(unpack(t.allTypes))
}
-
- return t.iterate(func(t *Interface) bool {
- for _, m := range t.methods {
- if w.isParameterized(m.typ) {
- return true
- }
- }
- return w.isParameterizedList(unpack(t.types))
- }, nil)
+ return tset.is(func(t *term) bool {
+ return w.isParameterized(t.typ)
+ })
case *Map:
return w.isParameterized(t.key) || w.isParameterized(t.elem)
@@ -347,14 +335,11 @@ func (w *tpWalker) isParameterized(typ Type) (res bool) {
return w.isParameterized(t.elem)
case *Named:
- return w.isParameterizedList(t.targs)
+ return w.isParameterizedTypeList(t.targs.list())
case *TypeParam:
// t must be one of w.tparams
- return t.index < len(w.tparams) && w.tparams[t.index].typ == t
-
- case *instance:
- return w.isParameterizedList(t.targs)
+ return t.index < len(w.tparams) && w.tparams[t.index] == t
default:
unreachable()
@@ -363,7 +348,7 @@ func (w *tpWalker) isParameterized(typ Type) (res bool) {
return false
}
-func (w *tpWalker) isParameterizedList(list []Type) bool {
+func (w *tpWalker) isParameterizedTypeList(list []Type) bool {
for _, t := range list {
if w.isParameterized(t) {
return true
@@ -380,12 +365,12 @@ func (w *tpWalker) isParameterizedList(list []Type) bool {
// first type argument in that list that couldn't be inferred (and thus is nil). If all
// type arguments were inferred successfully, index is < 0. The number of type arguments
// provided may be less than the number of type parameters, but there must be at least one.
-func (check *Checker) inferB(tparams []*TypeName, targs []Type, report bool) (types []Type, index int) {
+func (check *Checker) inferB(tparams []*TypeParam, targs []Type, report bool) (types []Type, index int) {
assert(len(tparams) >= len(targs) && len(targs) > 0)
// Setup bidirectional unification between those structural bounds
// and the corresponding type arguments (which may be nil!).
- u := newUnifier(check, false)
+ u := newUnifier(false)
u.x.init(tparams)
u.y = u.x // type parameters between LHS and RHS of unification are identical
@@ -398,12 +383,12 @@ func (check *Checker) inferB(tparams []*TypeName, targs []Type, report bool) (ty
// Unify type parameters with their structural constraints, if any.
for _, tpar := range tparams {
- typ := tpar.typ.(*TypeParam)
- sbound := check.structuralType(typ.bound)
+ typ := tpar
+ sbound := typ.structuralType()
if sbound != nil {
if !u.unify(typ, sbound) {
if report {
- check.errorf(tpar, "%s does not match %s", tpar, sbound)
+ check.errorf(tpar.obj, "%s does not match %s", tpar.obj, sbound)
}
return nil, 0
}
@@ -412,8 +397,8 @@ func (check *Checker) inferB(tparams []*TypeName, targs []Type, report bool) (ty
// u.x.types() now contains the incoming type arguments plus any additional type
// arguments for which there were structural constraints. The newly inferred non-
- // nil entries may still contain references to other type parameters. For instance,
- // for [A any, B interface{type []C}, C interface{type *A}], if A == int
+ // nil entries may still contain references to other type parameters.
+ // For instance, for [A any, B interface{ []C }, C interface{ *A }], if A == int
// was given, unification produced the type list [int, []C, *A]. We eliminate the
// remaining type parameters by substituting the type parameters in this type list
// until nothing changes anymore.
@@ -442,7 +427,7 @@ func (check *Checker) inferB(tparams []*TypeName, targs []Type, report bool) (ty
n := 0
for _, index := range dirty {
t0 := types[index]
- if t1 := check.subst(nopos, t0, smap); t1 != t0 {
+ if t1 := check.subst(nopos, t0, smap, nil); t1 != t0 {
types[index] = t1
dirty[n] = index
n++
@@ -472,16 +457,3 @@ func (check *Checker) inferB(tparams []*TypeName, targs []Type, report bool) (ty
return
}
-
-// structuralType returns the structural type of a constraint, if any.
-func (check *Checker) structuralType(constraint Type) Type {
- if iface, _ := under(constraint).(*Interface); iface != nil {
- check.completeInterface(nopos, iface)
- types := unpack(iface.allTypes)
- if len(types) == 1 {
- return types[0]
- }
- return nil
- }
- return constraint
-}
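// Illustrative sketch (not part of this diff): function-argument inference
// plus constraint type inference (the inferB step above) resolve S, E, and F
// from ordinary arguments, so no explicit type arguments are needed. Assumes
// Go 1.18+; Map is an invented name.
package main

import (
	"fmt"
	"strconv"
)

func Map[S ~[]E, E, F any](s S, f func(E) F) []F {
	r := make([]F, len(s))
	for i, v := range s {
		r[i] = f(v)
	}
	return r
}

func main() {
	fmt.Println(Map([]int{1, 2, 3}, strconv.Itoa)) // ["1" "2" "3"]
}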
diff --git a/src/cmd/compile/internal/types2/instantiate.go b/src/cmd/compile/internal/types2/instantiate.go
index 0df52e851c..8bea63ec86 100644
--- a/src/cmd/compile/internal/types2/instantiate.go
+++ b/src/cmd/compile/internal/types2/instantiate.go
@@ -2,30 +2,155 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+// This file implements instantiation of generic types
+// through substitution of type parameters by type arguments.
+
package types2
import (
"cmd/compile/internal/syntax"
+ "errors"
"fmt"
)
-// Instantiate instantiates the type typ with the given type arguments.
-// typ must be a *Named or a *Signature type, it must be generic, and
-// its number of type parameters must match the number of provided type
-// arguments. The result is a new, instantiated (not generic) type of
-// the same kind (either a *Named or a *Signature). The type arguments
-// are not checked against the constraints of the type parameters.
-// Any methods attached to a *Named are simply copied; they are not
-// instantiated.
-func Instantiate(pos syntax.Pos, typ Type, targs []Type) (res Type) {
- // TODO(gri) This code is basically identical to the prolog
- // in Checker.instantiate. Factor.
- var tparams []*TypeName
+// An Environment is an opaque type checking environment. It may be used to
+// share identical type instances across type checked packages or calls to
+// Instantiate.
+type Environment struct {
+ // For now, Environment just hides a Checker.
+ // Eventually, we strive to remove the need for a checker.
+ check *Checker
+}
+
+// NewEnvironment returns a new Environment, initialized with the given
+// Checker, or nil.
+func NewEnvironment(check *Checker) *Environment {
+ return &Environment{check}
+}
+
+// Instantiate instantiates the type typ with the given type arguments targs.
+// typ must be a *Named or a *Signature type, and its number of type parameters
+// must match the number of provided type arguments. The result is a new,
+// instantiated (not parameterized) type of the same kind (either a *Named or a
+// *Signature). Any methods attached to a *Named are simply copied; they are
+// not instantiated.
+//
+// If env is non-nil, it may be used to de-dupe the instance against previous
+// instances with the same identity. This functionality is implemented for
+// environments with non-nil Checkers.
+//
+// If validate is set and constraint satisfaction fails, the returned error may
+// be of dynamic type ArgumentError indicating which type argument did not
+// satisfy its corresponding type parameter constraint, and why.
+//
+// TODO(rfindley): change this function to also return an error if lengths of
+// tparams and targs do not match.
+func Instantiate(env *Environment, typ Type, targs []Type, validate bool) (Type, error) {
+ var check *Checker
+ if env != nil {
+ check = env.check
+ }
+ inst := check.instance(nopos, typ, targs)
+
+ var err error
+ if validate {
+ var tparams []*TypeParam
+ switch t := typ.(type) {
+ case *Named:
+ tparams = t.TParams().list()
+ case *Signature:
+ tparams = t.TParams().list()
+ }
+ if i, err := check.verify(nopos, tparams, targs); err != nil {
+ return inst, ArgumentError{i, err}
+ }
+ }
+
+ return inst, err
+}
+
+// instantiate creates an instance and defers verification of constraints to
+// later in the type checking pass. For Named types the resulting instance will
+// be unexpanded.
+func (check *Checker) instantiate(pos syntax.Pos, typ Type, targs []Type, posList []syntax.Pos) (res Type) {
+ assert(check != nil)
+ if check.conf.Trace {
+ check.trace(pos, "-- instantiating %s with %s", typ, typeListString(targs))
+ check.indent++
+ defer func() {
+ check.indent--
+ var under Type
+ if res != nil {
+ // Calling under() here may lead to endless instantiations.
+ // Test case: type T[P any] T[P]
+ // TODO(gri) investigate if that's a bug or to be expected.
+ under = safeUnderlying(res)
+ }
+ check.trace(pos, "=> %s (under = %s)", res, under)
+ }()
+ }
+
+ inst := check.instance(pos, typ, targs)
+
+ assert(len(posList) <= len(targs))
+ check.later(func() {
+ // Collect tparams again because lazily loaded *Named types may not have
+ // had tparams set up above.
+ var tparams []*TypeParam
+ switch t := typ.(type) {
+ case *Named:
+ tparams = t.TParams().list()
+ case *Signature:
+ tparams = t.TParams().list()
+ }
+ // Avoid duplicate errors; instantiate will have complained if tparams
+ // and targs do not have the same length.
+ if len(tparams) == len(targs) {
+ if i, err := check.verify(pos, tparams, targs); err != nil {
+ // best position for error reporting
+ pos := pos
+ if i < len(posList) {
+ pos = posList[i]
+ }
+ check.softErrorf(pos, err.Error())
+ }
+ }
+ })
+ return inst
+}
+
+// instance creates a type or function instance using the given original type
+// typ and arguments targs. For Named types the resulting instance will be
+// unexpanded.
+func (check *Checker) instance(pos syntax.Pos, typ Type, targs []Type) (res Type) {
+ // TODO(gri) What is better here: work with TypeParams, or work with TypeNames?
switch t := typ.(type) {
case *Named:
- tparams = t.tparams
+ h := instantiatedHash(t, targs)
+ if check != nil {
+ // typ may already have been instantiated with identical type arguments. In
+ // that case, re-use the existing instance.
+ if named := check.typMap[h]; named != nil {
+ return named
+ }
+ }
+
+ tname := NewTypeName(pos, t.obj.pkg, t.obj.name, nil)
+ named := check.newNamed(tname, t, nil, nil, nil) // methods and tparams are set when named is loaded
+ named.targs = NewTypeList(targs)
+ named.instance = &instance{pos}
+ if check != nil {
+ check.typMap[h] = named
+ }
+ res = named
case *Signature:
- tparams = t.tparams
+ tparams := t.TParams()
+ if !check.validateTArgLen(pos, tparams.Len(), len(targs)) {
+ return Typ[Invalid]
+ }
+ if tparams.Len() == 0 {
+ return typ // nothing to do (minor optimization)
+ }
defer func() {
// If we had an unexpected failure somewhere don't panic below when
// asserting res.(*Signature). Check for *Signature in case Typ[Invalid]
@@ -44,20 +169,125 @@ func Instantiate(pos syntax.Pos, typ Type, targs []Type) (res Type) {
// anymore; we need to set tparams to nil.
res.(*Signature).tparams = nil
}()
-
+ res = check.subst(pos, typ, makeSubstMap(tparams.list(), targs), nil)
default:
+ // only types and functions can be generic
panic(fmt.Sprintf("%v: cannot instantiate %v", pos, typ))
}
+ return res
+}
- // the number of supplied types must match the number of type parameters
- if len(targs) != len(tparams) {
- panic(fmt.Sprintf("%v: got %d arguments but %d type parameters", pos, len(targs), len(tparams)))
+// validateTArgLen verifies that the length of targs and tparams matches,
+// reporting an error if not. If validation fails and check is nil,
+// validateTArgLen panics.
+func (check *Checker) validateTArgLen(pos syntax.Pos, ntparams, ntargs int) bool {
+ if ntargs != ntparams {
+ // TODO(gri) provide better error message
+ if check != nil {
+ check.errorf(pos, "got %d arguments but %d type parameters", ntargs, ntparams)
+ return false
+ }
+ panic(fmt.Sprintf("%v: got %d arguments but %d type parameters", pos, ntargs, ntparams))
}
+ return true
+}
- if len(tparams) == 0 {
- return typ // nothing to do (minor optimization)
+func (check *Checker) verify(pos syntax.Pos, tparams []*TypeParam, targs []Type) (int, error) {
+ smap := makeSubstMap(tparams, targs)
+ for i, tpar := range tparams {
+ // stop checking bounds after the first failure
+ if err := check.satisfies(pos, targs[i], tpar, smap); err != nil {
+ return i, err
+ }
}
+ return -1, nil
+}
- smap := makeSubstMap(tparams, targs)
- return (*Checker)(nil).subst(pos, typ, smap)
+// satisfies reports whether the type argument targ satisfies the constraint of type
+// parameter tpar (after any of its type parameters have been substituted through smap).
+// A suitable error is reported if the result is false.
+// TODO(gri) This should be a method of interfaces or type sets.
+func (check *Checker) satisfies(pos syntax.Pos, targ Type, tpar *TypeParam, smap substMap) error {
+ iface := tpar.iface()
+ if iface.Empty() {
+ return nil // no type bound
+ }
+
+ // TODO(rfindley): it would be great if users could pass in a qualifier here,
+ // rather than falling back to verbose qualification. Maybe this can be part
+// of the shared environment.
+ var qf Qualifier
+ if check != nil {
+ qf = check.qualifier
+ }
+ errorf := func(format string, args ...interface{}) error {
+ return errors.New(sprintf(qf, format, args...))
+ }
+
+ // The type parameter bound is parameterized with the same type parameters
+ // as the instantiated type; before we can use it for bounds checking we
+ // need to instantiate it with the type arguments with which we instantiate
+ // the parameterized type.
+ iface = check.subst(pos, iface, smap, nil).(*Interface)
+
+ // if iface is comparable, targ must be comparable
+ // TODO(gri) the error message needs to be better here
+ if iface.IsComparable() && !Comparable(targ) {
+ if tpar := asTypeParam(targ); tpar != nil && tpar.iface().typeSet().IsAll() {
+ return errorf("%s has no constraints", targ)
+ }
+ return errorf("%s does not satisfy comparable", targ)
+ }
+
+ // targ must implement iface (methods)
+ // - check only if we have methods
+ if iface.NumMethods() > 0 {
+ // If the type argument is a pointer to a type parameter, the type argument's
+ // method set is empty.
+ // TODO(gri) is this what we want? (spec question)
+ if base, isPtr := deref(targ); isPtr && asTypeParam(base) != nil {
+ return errorf("%s has no methods", targ)
+ }
+ if m, wrong := check.missingMethod(targ, iface, true); m != nil {
+ // TODO(gri) needs to print updated name to avoid major confusion in error message!
+ // (print warning for now)
+ // Old warning:
+ // check.softErrorf(pos, "%s does not satisfy %s (warning: name not updated) = %s (missing method %s)", targ, tpar.bound, iface, m)
+ if wrong != nil {
+ // TODO(gri) This can still report uninstantiated types which makes the error message
+ // more difficult to read than necessary.
+ return errorf("%s does not satisfy %s: wrong method signature\n\tgot %s\n\twant %s",
+ targ, tpar.bound, wrong, m,
+ )
+ }
+ return errorf("%s does not satisfy %s (missing method %s)", targ, tpar.bound, m.name)
+ }
+ }
+
+ // targ's underlying type must also be one of the interface types listed, if any
+ if !iface.typeSet().hasTerms() {
+ return nil // nothing to do
+ }
+
+ // If targ is itself a type parameter, each of its possible types (and there must be at least one) must be in the
+ // list of iface types (i.e., the targ type list must be a non-empty subset of the iface types).
+ if targ := asTypeParam(targ); targ != nil {
+ targBound := targ.iface()
+ if !targBound.typeSet().hasTerms() {
+ return errorf("%s does not satisfy %s (%s has no type constraints)", targ, tpar.bound, targ)
+ }
+ if !targBound.typeSet().subsetOf(iface.typeSet()) {
+ // TODO(gri) need better error message
+ return errorf("%s does not satisfy %s", targ, tpar.bound)
+ }
+ return nil
+ }
+
+ // Otherwise, targ's type or underlying type must also be one of the interface types listed, if any.
+ if !iface.typeSet().includes(targ) {
+ // TODO(gri) better error message
+ return errorf("%s does not satisfy %s", targ, tpar.bound)
+ }
+
+ return nil
}
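
As an aside on the new Instantiate entry point above, here is a minimal sketch of its intended use, written as if inside the types2 package (the package is internal to cmd/compile); genericType is a placeholder for a parameterized *Named or *Signature produced by an earlier type-checking pass, not something constructed here.

// Sketch only; genericType stands in for a parameterized type.
func instantiateSketch(genericType Type) (Type, error) {
	env := NewEnvironment(nil) // nil Checker: identical instances are not shared
	targs := []Type{Typ[Int]}  // instantiate with the single type argument int
	// validate=true checks targs against the type parameter constraints; on
	// failure the error may be an ArgumentError naming the offending argument.
	return Instantiate(env, genericType, targs, true)
}
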
diff --git a/src/cmd/compile/internal/types2/interface.go b/src/cmd/compile/internal/types2/interface.go
new file mode 100644
index 0000000000..ccd3de0a6e
--- /dev/null
+++ b/src/cmd/compile/internal/types2/interface.go
@@ -0,0 +1,226 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+import "cmd/compile/internal/syntax"
+
+// ----------------------------------------------------------------------------
+// API
+
+// An Interface represents an interface type.
+type Interface struct {
+ obj *TypeName // corresponding declared object; or nil (for better error messages)
+ methods []*Func // ordered list of explicitly declared methods
+ embeddeds []Type // ordered list of explicitly embedded elements
+ embedPos *[]syntax.Pos // positions of embedded elements; or nil (for error messages) - use pointer to save space
+ complete bool // indicates that all fields (except for tset) are set up
+
+ tset *_TypeSet // type set described by this interface, computed lazily
+}
+
+// typeSet returns the type set for interface t.
+func (t *Interface) typeSet() *_TypeSet { return computeInterfaceTypeSet(nil, nopos, t) }
+
+// emptyInterface represents the empty interface
+var emptyInterface = Interface{complete: true, tset: &topTypeSet}
+
+// NewInterface returns a new interface for the given methods and embedded types.
+// NewInterface takes ownership of the provided methods and may modify their types
+// by setting missing receivers.
+//
+// Deprecated: Use NewInterfaceType instead which allows arbitrary embedded types.
+func NewInterface(methods []*Func, embeddeds []*Named) *Interface {
+ tnames := make([]Type, len(embeddeds))
+ for i, t := range embeddeds {
+ tnames[i] = t
+ }
+ return NewInterfaceType(methods, tnames)
+}
+
+// NewInterfaceType returns a new interface for the given methods and embedded types.
+// NewInterfaceType takes ownership of the provided methods and may modify their types
+// by setting missing receivers.
+func NewInterfaceType(methods []*Func, embeddeds []Type) *Interface {
+ if len(methods) == 0 && len(embeddeds) == 0 {
+ return &emptyInterface
+ }
+
+ // set method receivers if necessary
+ typ := new(Interface)
+ for _, m := range methods {
+ if sig := m.typ.(*Signature); sig.recv == nil {
+ sig.recv = NewVar(m.pos, m.pkg, "", typ)
+ }
+ }
+
+ // sort for API stability
+ sortMethods(methods)
+
+ typ.methods = methods
+ typ.embeddeds = embeddeds
+ typ.complete = true
+
+ return typ
+}
+
+// NumExplicitMethods returns the number of explicitly declared methods of interface t.
+func (t *Interface) NumExplicitMethods() int { return len(t.methods) }
+
+// ExplicitMethod returns the i'th explicitly declared method of interface t for 0 <= i < t.NumExplicitMethods().
+// The methods are ordered by their unique Id.
+func (t *Interface) ExplicitMethod(i int) *Func { return t.methods[i] }
+
+// NumEmbeddeds returns the number of embedded types in interface t.
+func (t *Interface) NumEmbeddeds() int { return len(t.embeddeds) }
+
+// Embedded returns the i'th embedded defined (*Named) type of interface t for 0 <= i < t.NumEmbeddeds().
+// The result is nil if the i'th embedded type is not a defined type.
+//
+// Deprecated: Use EmbeddedType which is not restricted to defined (*Named) types.
+func (t *Interface) Embedded(i int) *Named { tname, _ := t.embeddeds[i].(*Named); return tname }
+
+// EmbeddedType returns the i'th embedded type of interface t for 0 <= i < t.NumEmbeddeds().
+func (t *Interface) EmbeddedType(i int) Type { return t.embeddeds[i] }
+
+// NumMethods returns the total number of methods of interface t.
+func (t *Interface) NumMethods() int { return t.typeSet().NumMethods() }
+
+// Method returns the i'th method of interface t for 0 <= i < t.NumMethods().
+// The methods are ordered by their unique Id.
+func (t *Interface) Method(i int) *Func { return t.typeSet().Method(i) }
+
+// Empty reports whether t is the empty interface.
+func (t *Interface) Empty() bool { return t.typeSet().IsAll() }
+
+// IsComparable reports whether each type in interface t's type set is comparable.
+func (t *Interface) IsComparable() bool { return t.typeSet().IsComparable() }
+
+// IsConstraint reports whether interface t is not just a method set.
+func (t *Interface) IsConstraint() bool { return !t.typeSet().IsMethodSet() }
+
+func (t *Interface) Underlying() Type { return t }
+func (t *Interface) String() string { return TypeString(t, nil) }
+
+// ----------------------------------------------------------------------------
+// Implementation
+
+func (check *Checker) interfaceType(ityp *Interface, iface *syntax.InterfaceType, def *Named) {
+ var tlist []syntax.Expr // types collected from all type lists
+ var tname *syntax.Name // most recent "type" name
+
+ addEmbedded := func(pos syntax.Pos, typ Type) {
+ ityp.embeddeds = append(ityp.embeddeds, typ)
+ if ityp.embedPos == nil {
+ ityp.embedPos = new([]syntax.Pos)
+ }
+ *ityp.embedPos = append(*ityp.embedPos, pos)
+ }
+
+ for _, f := range iface.MethodList {
+ if f.Name == nil {
+ // We have an embedded type; possibly a union of types.
+ addEmbedded(posFor(f.Type), parseUnion(check, flattenUnion(nil, f.Type)))
+ continue
+ }
+ // f.Name != nil
+
+ // We have a method with name f.Name, or a type of a type list (f.Name.Value == "type").
+ name := f.Name.Value
+ if name == "_" {
+ if check.conf.CompilerErrorMessages {
+ check.error(f.Name, "methods must have a unique non-blank name")
+ } else {
+ check.error(f.Name, "invalid method name _")
+ }
+ continue // ignore
+ }
+
+ // TODO(gri) Remove type list handling once the parser doesn't accept type lists anymore.
+ if name == "type" {
+ // Report an error for the first type list per interface
+ // if we don't allow type lists, but continue.
+ if !check.conf.AllowTypeLists && tlist == nil {
+ check.softErrorf(f.Name, "use generalized embedding syntax instead of a type list")
+ }
+ // For now, collect all type list entries as if it
+ // were a single union, where each union element is
+ // of the form ~T.
+ op := new(syntax.Operation)
+ // We should also set the position (but there is no setter);
+ // we don't care because this code will eventually go away.
+ op.Op = syntax.Tilde
+ op.X = f.Type
+ tlist = append(tlist, op)
+ // Report an error if we have multiple type lists in an
+ // interface, but only if they are permitted in the first place.
+ if check.conf.AllowTypeLists && tname != nil && tname != f.Name {
+ check.error(f.Name, "cannot have multiple type lists in an interface")
+ }
+ tname = f.Name
+ continue
+ }
+
+ typ := check.typ(f.Type)
+ sig, _ := typ.(*Signature)
+ if sig == nil {
+ if typ != Typ[Invalid] {
+ check.errorf(f.Type, invalidAST+"%s is not a method signature", typ)
+ }
+ continue // ignore
+ }
+
+ // Always type-check method type parameters but complain if they are not enabled.
+ // (This extra check is needed here because interface method signatures don't have
+ // a receiver specification.)
+ if sig.tparams != nil && !acceptMethodTypeParams {
+ check.error(f.Type, "methods cannot have type parameters")
+ }
+
+ // use named receiver type if available (for better error messages)
+ var recvTyp Type = ityp
+ if def != nil {
+ recvTyp = def
+ }
+ sig.recv = NewVar(f.Name.Pos(), check.pkg, "", recvTyp)
+
+ m := NewFunc(f.Name.Pos(), check.pkg, name, sig)
+ check.recordDef(f.Name, m)
+ ityp.methods = append(ityp.methods, m)
+ }
+
+ // If we saw a type list, add it like an embedded union.
+ if tlist != nil {
+ // Types T in a type list are added as ~T expressions but we don't
+ // have the position of the '~'. Use the first type position instead.
+ addEmbedded(tlist[0].(*syntax.Operation).X.Pos(), parseUnion(check, tlist))
+ }
+
+ // All methods and embedded elements for this interface are collected;
+ // i.e., this interface may be used in a type set computation.
+ ityp.complete = true
+
+ if len(ityp.methods) == 0 && len(ityp.embeddeds) == 0 {
+ // empty interface
+ ityp.tset = &topTypeSet
+ return
+ }
+
+ // sort for API stability
+ // (don't sort embeddeds: they must correspond to *embedPos entries)
+ sortMethods(ityp.methods)
+
+ // Compute type set with a non-nil *Checker as soon as possible
+ // to report any errors. Subsequent uses of type sets will use
+ // this computed type set and won't need to pass in a *Checker.
+ check.later(func() { computeInterfaceTypeSet(check, iface.Pos(), ityp) })
+}
+
+func flattenUnion(list []syntax.Expr, x syntax.Expr) []syntax.Expr {
+ if o, _ := x.(*syntax.Operation); o != nil && o.Op == syntax.Or {
+ list = flattenUnion(list, o.X)
+ x = o.Y
+ }
+ return append(list, x)
+}
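
For context on the lazily-completed Interface introduced in this file, a small sketch (again as if inside the types2 package) mirroring the updated TestIssue28282 below: accessing methods triggers the type-set computation, so no explicit completion step is needed.

// Sketch: interface{ error } built through the public constructor.
func interfaceSketch() {
	et := Universe.Lookup("error").Type()   // the predeclared error interface
	it := NewInterfaceType(nil, []Type{et}) // embeds error, no explicit methods
	n := it.NumMethods()                    // 1: Error() string, via the type set
	m := it.Method(0)                       // the Error method
	_, _ = n, m
}
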
diff --git a/src/cmd/compile/internal/types2/issues_test.go b/src/cmd/compile/internal/types2/issues_test.go
index e716a48038..9890b79323 100644
--- a/src/cmd/compile/internal/types2/issues_test.go
+++ b/src/cmd/compile/internal/types2/issues_test.go
@@ -321,7 +321,7 @@ func TestIssue25627(t *testing.T) {
}
}
- syntax.Walk(f, func(n syntax.Node) bool {
+ syntax.Crawl(f, func(n syntax.Node) bool {
if decl, _ := n.(*syntax.TypeDecl); decl != nil {
if tv, ok := info.Types[decl.Type]; ok && decl.Name.Value == "T" {
want := strings.Count(src, ";") + 1
@@ -402,8 +402,9 @@ func TestIssue28282(t *testing.T) {
// create type interface { error }
et := Universe.Lookup("error").Type()
it := NewInterfaceType(nil, []Type{et})
- it.Complete()
// verify that after completing the interface, the embedded method remains unchanged
+ // (interfaces are "completed" lazily now, so the completion happens implicitly when
+ // accessing Method(0))
want := et.Underlying().(*Interface).Method(0)
got := it.Method(0)
if got != want {
diff --git a/src/cmd/compile/internal/types2/labels.go b/src/cmd/compile/internal/types2/labels.go
index d3206988b5..6f02e2fc96 100644
--- a/src/cmd/compile/internal/types2/labels.go
+++ b/src/cmd/compile/internal/types2/labels.go
@@ -32,7 +32,8 @@ func (check *Checker) labels(body *syntax.BlockStmt) {
}
// spec: "It is illegal to define a label that is never used."
- for _, obj := range all.elems {
+ for name, obj := range all.elems {
+ obj = resolve(name, obj)
if lbl := obj.(*Label); !lbl.used {
check.softErrorf(lbl.pos, "label %s declared but not used", lbl.name)
}
diff --git a/src/cmd/compile/internal/types2/lookup.go b/src/cmd/compile/internal/types2/lookup.go
index 78299502e9..d0718e51e2 100644
--- a/src/cmd/compile/internal/types2/lookup.go
+++ b/src/cmd/compile/internal/types2/lookup.go
@@ -6,6 +6,11 @@
package types2
+// Internal use of LookupFieldOrMethod: If the obj result is a method
+// associated with a concrete (non-interface) type, the method's signature
+// may not be fully set up. Call Checker.objDecl(obj, nil) before accessing
+// the method's type.
+
// LookupFieldOrMethod looks up a field or method with given package and name
// in T and returns the corresponding *Var or *Func, an index sequence, and a
// bool indicating if there were any pointer indirections on the path to the
@@ -33,19 +38,6 @@ package types2
// the method's formal receiver base type, nor was the receiver addressable.
//
func LookupFieldOrMethod(T Type, addressable bool, pkg *Package, name string) (obj Object, index []int, indirect bool) {
- return (*Checker)(nil).lookupFieldOrMethod(T, addressable, pkg, name)
-}
-
-// Internal use of Checker.lookupFieldOrMethod: If the obj result is a method
-// associated with a concrete (non-interface) type, the method's signature
-// may not be fully set up. Call Checker.objDecl(obj, nil) before accessing
-// the method's type.
-// TODO(gri) Now that we provide the *Checker, we can probably remove this
-// caveat by calling Checker.objDecl from lookupFieldOrMethod. Investigate.
-
-// lookupFieldOrMethod is like the external version but completes interfaces
-// as necessary.
-func (check *Checker) lookupFieldOrMethod(T Type, addressable bool, pkg *Package, name string) (obj Object, index []int, indirect bool) {
// Methods cannot be associated to a named pointer type
// (spec: "The type denoted by T is called the receiver base type;
// it must not be a pointer or interface type and it must be declared
@@ -54,8 +46,8 @@ func (check *Checker) lookupFieldOrMethod(T Type, addressable bool, pkg *Package
// pointer type but discard the result if it is a method since we would
// not have found it for T (see also issue 8590).
if t := asNamed(T); t != nil {
- if p, _ := t.underlying.(*Pointer); p != nil {
- obj, index, indirect = check.rawLookupFieldOrMethod(p, false, pkg, name)
+ if p, _ := safeUnderlying(t).(*Pointer); p != nil {
+ obj, index, indirect = lookupFieldOrMethod(p, false, pkg, name)
if _, ok := obj.(*Func); ok {
return nil, nil, false
}
@@ -63,7 +55,7 @@ func (check *Checker) lookupFieldOrMethod(T Type, addressable bool, pkg *Package
}
}
- return check.rawLookupFieldOrMethod(T, addressable, pkg, name)
+ return lookupFieldOrMethod(T, addressable, pkg, name)
}
// TODO(gri) The named type consolidation and seen maps below must be
@@ -71,10 +63,9 @@ func (check *Checker) lookupFieldOrMethod(T Type, addressable bool, pkg *Package
// types always have only one representation (even when imported
// indirectly via different packages.)
-// rawLookupFieldOrMethod should only be called by lookupFieldOrMethod and missingMethod.
-func (check *Checker) rawLookupFieldOrMethod(T Type, addressable bool, pkg *Package, name string) (obj Object, index []int, indirect bool) {
+// lookupFieldOrMethod should only be called by LookupFieldOrMethod and missingMethod.
+func lookupFieldOrMethod(T Type, addressable bool, pkg *Package, name string) (obj Object, index []int, indirect bool) {
// WARNING: The code in this function is extremely subtle - do not modify casually!
- // This function and NewMethodSet should be kept in sync.
if name == "_" {
return // blank fields/methods are never found
@@ -82,10 +73,15 @@ func (check *Checker) rawLookupFieldOrMethod(T Type, addressable bool, pkg *Pack
typ, isPtr := deref(T)
- // *typ where typ is an interface has no methods.
- // Be cautious: typ may be nil (issue 39634, crash #3).
- if typ == nil || isPtr && IsInterface(typ) {
- return
+ // *typ where typ is an interface or type parameter has no methods.
+ if isPtr {
+ // don't look at under(typ) here - was bug (issue #47747)
+ if _, ok := typ.(*TypeParam); ok {
+ return
+ }
+ if _, ok := under(typ).(*Interface); ok {
+ return
+ }
}
// Start with typ as single entry at shallowest depth.
@@ -126,6 +122,7 @@ func (check *Checker) rawLookupFieldOrMethod(T Type, addressable bool, pkg *Pack
seen[named] = true
// look for a matching attached method
+ named.load()
if i, m := lookupMethod(named.methods, pkg, name); m != nil {
// potential match
// caution: method may not have a proper signature yet
@@ -181,9 +178,7 @@ func (check *Checker) rawLookupFieldOrMethod(T Type, addressable bool, pkg *Pack
case *Interface:
// look for a matching method
- // TODO(gri) t.allMethods is sorted - use binary search
- check.completeInterface(nopos, t)
- if i, m := lookupMethod(t.allMethods, pkg, name); m != nil {
+ if i, m := t.typeSet().LookupMethod(pkg, name); m != nil {
assert(m.typ != nil)
index = concat(e.index, i)
if obj != nil || e.multiples {
@@ -194,7 +189,7 @@ func (check *Checker) rawLookupFieldOrMethod(T Type, addressable bool, pkg *Pack
}
case *TypeParam:
- if i, m := lookupMethod(t.Bound().allMethods, pkg, name); m != nil {
+ if i, m := t.iface().typeSet().LookupMethod(pkg, name); m != nil {
assert(m.typ != nil)
index = concat(e.index, i)
if obj != nil || e.multiples {
@@ -229,7 +224,7 @@ func (check *Checker) rawLookupFieldOrMethod(T Type, addressable bool, pkg *Pack
return
}
- current = check.consolidateMultiples(next)
+ current = consolidateMultiples(next)
}
return nil, nil, false // not found
@@ -246,7 +241,7 @@ type embeddedType struct {
// consolidateMultiples collects multiple list entries with the same type
// into a single entry marked as containing multiples. The result is the
// consolidated list.
-func (check *Checker) consolidateMultiples(list []embeddedType) []embeddedType {
+func consolidateMultiples(list []embeddedType) []embeddedType {
if len(list) <= 1 {
return list // at most one entry - nothing to do
}
@@ -254,7 +249,7 @@ func (check *Checker) consolidateMultiples(list []embeddedType) []embeddedType {
n := 0 // number of entries w/ unique type
prev := make(map[Type]int) // index at which type was previously seen
for _, e := range list {
- if i, found := check.lookupType(prev, e.typ); found {
+ if i, found := lookupType(prev, e.typ); found {
list[i].multiples = true
// ignore this entry
} else {
@@ -266,14 +261,14 @@ func (check *Checker) consolidateMultiples(list []embeddedType) []embeddedType {
return list[:n]
}
-func (check *Checker) lookupType(m map[Type]int, typ Type) (int, bool) {
+func lookupType(m map[Type]int, typ Type) (int, bool) {
// fast path: maybe the types are equal
if i, found := m[typ]; found {
return i, true
}
for t, i := range m {
- if check.identical(t, typ) {
+ if Identical(t, typ) {
return i, true
}
}
@@ -306,22 +301,18 @@ func MissingMethod(V Type, T *Interface, static bool) (method *Func, wrongType b
// To improve error messages, also report the wrong signature
// when the method exists on *V instead of V.
func (check *Checker) missingMethod(V Type, T *Interface, static bool) (method, wrongType *Func) {
- check.completeInterface(nopos, T)
-
// fast path for common case
if T.Empty() {
return
}
if ityp := asInterface(V); ityp != nil {
- check.completeInterface(nopos, ityp)
- // TODO(gri) allMethods is sorted - can do this more efficiently
- for _, m := range T.allMethods {
- _, f := lookupMethod(ityp.allMethods, m.pkg, m.name)
+ // TODO(gri) the methods are sorted - could do this more efficiently
+ for _, m := range T.typeSet().methods {
+ _, f := ityp.typeSet().LookupMethod(m.pkg, m.name)
if f == nil {
- // if m is the magic method == we're ok (interfaces are comparable)
- if m.name == "==" || !static {
+ if !static {
continue
}
return m, f
@@ -330,17 +321,20 @@ func (check *Checker) missingMethod(V Type, T *Interface, static bool) (method,
// both methods must have the same number of type parameters
ftyp := f.typ.(*Signature)
mtyp := m.typ.(*Signature)
- if len(ftyp.tparams) != len(mtyp.tparams) {
+ if ftyp.TParams().Len() != mtyp.TParams().Len() {
return m, f
}
+ if !acceptMethodTypeParams && ftyp.TParams().Len() > 0 {
+ panic("method with type parameters")
+ }
// If the methods have type parameters we don't care whether they
// are the same or not, as long as they match up. Use unification
// to see if they can be made to match.
// TODO(gri) is this always correct? what about type bounds?
// (Alternative is to rename/subst type parameters and compare.)
- u := newUnifier(check, true)
- u.x.init(ftyp.tparams)
+ u := newUnifier(true)
+ u.x.init(ftyp.TParams().list())
if !u.unify(ftyp, mtyp) {
return m, f
}
@@ -352,14 +346,14 @@ func (check *Checker) missingMethod(V Type, T *Interface, static bool) (method,
// A concrete type implements T if it implements all methods of T.
Vd, _ := deref(V)
Vn := asNamed(Vd)
- for _, m := range T.allMethods {
+ for _, m := range T.typeSet().methods {
// TODO(gri) should this be calling lookupFieldOrMethod instead (and why not)?
- obj, _, _ := check.rawLookupFieldOrMethod(V, false, m.pkg, m.name)
+ obj, _, _ := lookupFieldOrMethod(V, false, m.pkg, m.name)
// Check if *V implements this method of T.
if obj == nil {
ptr := NewPointer(V)
- obj, _, _ = check.rawLookupFieldOrMethod(ptr, false, m.pkg, m.name)
+ obj, _, _ = lookupFieldOrMethod(ptr, false, m.pkg, m.name)
if obj != nil {
return m, obj.(*Func)
}
@@ -368,10 +362,6 @@ func (check *Checker) missingMethod(V Type, T *Interface, static bool) (method,
// we must have a method (not a field of matching function type)
f, _ := obj.(*Func)
if f == nil {
- // if m is the magic method == and V is comparable, we're ok
- if m.name == "==" && Comparable(V) {
- continue
- }
return m, nil
}
@@ -383,9 +373,12 @@ func (check *Checker) missingMethod(V Type, T *Interface, static bool) (method,
// both methods must have the same number of type parameters
ftyp := f.typ.(*Signature)
mtyp := m.typ.(*Signature)
- if len(ftyp.tparams) != len(mtyp.tparams) {
+ if ftyp.TParams().Len() != mtyp.TParams().Len() {
return m, f
}
+ if !acceptMethodTypeParams && ftyp.TParams().Len() > 0 {
+ panic("method with type parameters")
+ }
// If V is a (instantiated) generic type, its methods are still
// parameterized using the original (declaration) receiver type
@@ -394,17 +387,17 @@ func (check *Checker) missingMethod(V Type, T *Interface, static bool) (method,
// In order to compare the signatures, substitute the receiver
// type parameters of ftyp with V's instantiation type arguments.
// This lazily instantiates the signature of method f.
- if Vn != nil && len(Vn.tparams) > 0 {
+ if Vn != nil && Vn.TParams().Len() > 0 {
// Be careful: The number of type arguments may not match
// the number of receiver parameters. If so, an error was
// reported earlier but the length discrepancy is still
// here. Exit early in this case to prevent an assertion
// failure in makeSubstMap.
// TODO(gri) Can we avoid this check by fixing the lengths?
- if len(ftyp.rparams) != len(Vn.targs) {
+ if len(ftyp.RParams().list()) != Vn.targs.Len() {
return
}
- ftyp = check.subst(nopos, ftyp, makeSubstMap(ftyp.rparams, Vn.targs)).(*Signature)
+ ftyp = check.subst(nopos, ftyp, makeSubstMap(ftyp.RParams().list(), Vn.targs.list()), nil).(*Signature)
}
// If the methods have type parameters we don't care whether they
@@ -412,8 +405,21 @@ func (check *Checker) missingMethod(V Type, T *Interface, static bool) (method,
// to see if they can be made to match.
// TODO(gri) is this always correct? what about type bounds?
// (Alternative is to rename/subst type parameters and compare.)
- u := newUnifier(check, true)
- u.x.init(ftyp.tparams)
+ u := newUnifier(true)
+ if ftyp.TParams().Len() > 0 {
+ // We reach here only if we accept method type parameters.
+ // In this case, unification must consider any receiver
+ // and method type parameters as "free" type parameters.
+ assert(acceptMethodTypeParams)
+ // We don't have a test case for this at the moment since
+ // we can't parse method type parameters. Keeping the
+ // unimplemented call so that we test this code if we
+ // enable method type parameters.
+ unimplemented()
+ u.x.init(append(ftyp.RParams().list(), ftyp.TParams().list()...))
+ } else {
+ u.x.init(ftyp.RParams().list())
+ }
if !u.unify(ftyp, mtyp) {
return m, f
}
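
A short sketch of the checker-free LookupFieldOrMethod path after this change (same in-package assumption as above):

// Sketch: field/method lookup no longer needs a *Checker.
func lookupSketch() {
	et := Universe.Lookup("error").Type()
	obj, index, indirect := LookupFieldOrMethod(et, false, nil, "Error")
	_ = obj      // *Func for Error() string
	_ = index    // index sequence of embedded fields traversed
	_ = indirect // whether a pointer indirection occurred on the path
}
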
diff --git a/src/cmd/compile/internal/types2/map.go b/src/cmd/compile/internal/types2/map.go
new file mode 100644
index 0000000000..0d3464caae
--- /dev/null
+++ b/src/cmd/compile/internal/types2/map.go
@@ -0,0 +1,24 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+// A Map represents a map type.
+type Map struct {
+ key, elem Type
+}
+
+// NewMap returns a new map for the given key and element types.
+func NewMap(key, elem Type) *Map {
+ return &Map{key: key, elem: elem}
+}
+
+// Key returns the key type of map m.
+func (m *Map) Key() Type { return m.key }
+
+// Elem returns the element type of map m.
+func (m *Map) Elem() Type { return m.elem }
+
+func (t *Map) Underlying() Type { return t }
+func (t *Map) String() string { return TypeString(t, nil) }
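
The Map type here appears to be split out into its own file unchanged; for completeness, a tiny usage sketch:

// Sketch of the Map API.
func mapSketch() {
	m := NewMap(Typ[String], Typ[Int]) // map[string]int
	_, _ = m.Key(), m.Elem()           // string, int
}
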
diff --git a/src/cmd/compile/internal/types2/named.go b/src/cmd/compile/internal/types2/named.go
new file mode 100644
index 0000000000..a3a2595a22
--- /dev/null
+++ b/src/cmd/compile/internal/types2/named.go
@@ -0,0 +1,295 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+import (
+ "cmd/compile/internal/syntax"
+ "sync"
+)
+
+// TODO(gri) Clean up Named struct below; specifically the fromRHS field (can we use underlying?).
+
+// A Named represents a named (defined) type.
+type Named struct {
+ check *Checker
+ info typeInfo // for cycle detection
+ obj *TypeName // corresponding declared object for declared types; placeholder for instantiated types
+ orig *Named // original, uninstantiated type
+ fromRHS Type // type (on RHS of declaration) this *Named type is derived from (for cycle reporting)
+ underlying Type // possibly a *Named during setup; never a *Named once set up completely
+ instance *instance // position information for lazy instantiation, or nil
+ tparams *TParamList // type parameters, or nil
+ targs *TypeList // type arguments (after instantiation), or nil
+ methods []*Func // methods declared for this type (not the method set of this type); signatures are type-checked lazily
+
+ resolve func(*Named) ([]*TypeParam, Type, []*Func)
+ once sync.Once
+}
+
+// NewNamed returns a new named type for the given type name, underlying type, and associated methods.
+// If the given type name obj doesn't have a type yet, its type is set to the returned named type.
+// The underlying type must not be a *Named.
+func NewNamed(obj *TypeName, underlying Type, methods []*Func) *Named {
+ if _, ok := underlying.(*Named); ok {
+ panic("underlying type must not be *Named")
+ }
+ return (*Checker)(nil).newNamed(obj, nil, underlying, nil, methods)
+}
+
+func (t *Named) load() *Named {
+ // If t is an instantiated type, it derives its methods and tparams from its
+ // base type. Since we expect type parameters and methods to be set after a
+ // call to load, we must load the base and copy here.
+ //
+ // underlying is set when t is expanded.
+ //
+ // By convention, a type instance is loaded iff its tparams are set.
+ if t.targs.Len() > 0 && t.tparams == nil {
+ t.orig.load()
+ t.tparams = t.orig.tparams
+ t.methods = t.orig.methods
+ }
+ if t.resolve == nil {
+ return t
+ }
+
+ t.once.Do(func() {
+ // TODO(mdempsky): Since we're passing t to resolve anyway
+ // (necessary because types2 expects the receiver type for methods
+ // on defined interface types to be the Named rather than the
+ // underlying Interface), maybe it should just handle calling
+ // SetTParams, SetUnderlying, and AddMethod instead? Those
+ // methods would need to support reentrant calls though. It would
+ // also make the API more future-proof towards further extensions
+ // (like SetTParams).
+
+ tparams, underlying, methods := t.resolve(t)
+
+ switch underlying.(type) {
+ case nil, *Named:
+ panic("invalid underlying type")
+ }
+
+ t.tparams = bindTParams(tparams)
+ t.underlying = underlying
+ t.methods = methods
+ })
+ return t
+}
+
+// newNamed is like NewNamed but with a *Checker receiver and additional orig argument.
+func (check *Checker) newNamed(obj *TypeName, orig *Named, underlying Type, tparams *TParamList, methods []*Func) *Named {
+ typ := &Named{check: check, obj: obj, orig: orig, fromRHS: underlying, underlying: underlying, tparams: tparams, methods: methods}
+ if typ.orig == nil {
+ typ.orig = typ
+ }
+ if obj.typ == nil {
+ obj.typ = typ
+ }
+ // Ensure that typ is always expanded, at which point the check field can be
+ // nilled out.
+ //
+ // Note that currently we cannot nil out check inside typ.under(), because
+ // it's possible that typ is expanded multiple times.
+ //
+ // TODO(gri): clean this up so that under is the only function mutating
+ // named types.
+ if check != nil {
+ check.later(func() {
+ switch typ.under().(type) {
+ case *Named:
+ panic("unexpanded underlying type")
+ }
+ typ.check = nil
+ })
+ }
+ return typ
+}
+
+// Obj returns the type name for the declaration defining the named type t. For
+// instantiated types, this is the type name of the base type.
+func (t *Named) Obj() *TypeName {
+ return t.orig.obj // for non-instances this is the same as t.obj
+}
+
+// Orig returns the original generic type an instantiated type is derived from.
+// If t is not an instantiated type, the result is t.
+func (t *Named) Orig() *Named { return t.orig }
+
+// TODO(gri) Come up with a better representation and API to distinguish
+// between parameterized instantiated and non-instantiated types.
+
+// TParams returns the type parameters of the named type t, or nil.
+// The result is non-nil for an (originally) parameterized type even if it is instantiated.
+func (t *Named) TParams() *TParamList { return t.load().tparams }
+
+// SetTParams sets the type parameters of the named type t.
+func (t *Named) SetTParams(tparams []*TypeParam) { t.load().tparams = bindTParams(tparams) }
+
+// TArgs returns the type arguments used to instantiate the named type t.
+func (t *Named) TArgs() *TypeList { return t.targs }
+
+// NumMethods returns the number of explicit methods whose receiver is named type t.
+func (t *Named) NumMethods() int { return len(t.load().methods) }
+
+// Method returns the i'th method of named type t for 0 <= i < t.NumMethods().
+func (t *Named) Method(i int) *Func { return t.load().methods[i] }
+
+// SetUnderlying sets the underlying type and marks t as complete.
+func (t *Named) SetUnderlying(underlying Type) {
+ if underlying == nil {
+ panic("underlying type must not be nil")
+ }
+ if _, ok := underlying.(*Named); ok {
+ panic("underlying type must not be *Named")
+ }
+ t.load().underlying = underlying
+}
+
+// AddMethod adds method m unless it is already in the method list.
+func (t *Named) AddMethod(m *Func) {
+ t.load()
+ if i, _ := lookupMethod(t.methods, m.pkg, m.name); i < 0 {
+ t.methods = append(t.methods, m)
+ }
+}
+
+func (t *Named) Underlying() Type { return t.load().expand(nil).underlying }
+func (t *Named) String() string { return TypeString(t, nil) }
+
+// ----------------------------------------------------------------------------
+// Implementation
+
+// under returns the expanded underlying type of n0; possibly by following
+// forward chains of named types. If an underlying type is found, resolve
+// the chain by setting the underlying type for each defined type in the
+// chain before returning it. If no underlying type is found or a cycle
+// is detected, the result is Typ[Invalid]. If a cycle is detected and
+// n0.check != nil, the cycle is reported.
+func (n0 *Named) under() Type {
+ u := n0.Underlying()
+
+ // If the underlying type of a defined type is not a defined
+ // (incl. instance) type, then that is the desired underlying
+ // type.
+ var n1 *Named
+ switch u1 := u.(type) {
+ case nil:
+ return Typ[Invalid]
+ default:
+ // common case
+ return u
+ case *Named:
+ // handled below
+ n1 = u1
+ }
+
+ if n0.check == nil {
+ panic("Named.check == nil but type is incomplete")
+ }
+
+ // Invariant: after this point n0 as well as any named types in its
+ // underlying chain should be set up when this function exits.
+ check := n0.check
+ n := n0
+
+ seen := make(map[*Named]int) // types that need their underlying resolved
+ var path []Object // objects encountered, for cycle reporting
+
+loop:
+ for {
+ seen[n] = len(seen)
+ path = append(path, n.obj)
+ n = n1
+ if i, ok := seen[n]; ok {
+ // cycle
+ check.cycleError(path[i:])
+ u = Typ[Invalid]
+ break
+ }
+ u = n.Underlying()
+ switch u1 := u.(type) {
+ case nil:
+ u = Typ[Invalid]
+ break loop
+ default:
+ break loop
+ case *Named:
+ // Continue collecting *Named types in the chain.
+ n1 = u1
+ }
+ }
+
+ for n := range seen {
+ // We should never have to update the underlying type of an imported type;
+ // those underlying types should have been resolved during the import.
+ // Also, doing so would lead to a race condition (was issue #31749).
+ // Do this check always, not just in debug mode (it's cheap).
+ if n.obj.pkg != check.pkg {
+ panic("imported type with unresolved underlying type")
+ }
+ n.underlying = u
+ }
+
+ return u
+}
+
+func (n *Named) setUnderlying(typ Type) {
+ if n != nil {
+ n.underlying = typ
+ }
+}
+
+// instance holds position information for use in lazy instantiation.
+//
+// TODO(rfindley): instance is probably unnecessary now. See if it can be
+// eliminated.
+type instance struct {
+ pos syntax.Pos // position of type instantiation; for error reporting only
+}
+
+// expand ensures that the underlying type of n is instantiated.
+// The underlying type will be Typ[Invalid] if there was an error.
+func (n *Named) expand(typMap map[string]*Named) *Named {
+ if n.instance != nil {
+ // n must be loaded before instantiation, in order to have accurate
+ // tparams. This is done implicitly by the call to n.TParams, but making it
+ // explicit is harmless: load is idempotent.
+ n.load()
+ var u Type
+ if n.check.validateTArgLen(n.instance.pos, n.tparams.Len(), n.targs.Len()) {
+ if typMap == nil {
+ if n.check != nil {
+ typMap = n.check.typMap
+ } else {
+ // If we're instantiating lazily, we might be outside the scope of a
+ // type-checking pass. In that case we won't have a pre-existing
+ // typMap, but don't want to create a duplicate of the current instance
+ // in the process of expansion.
+ h := instantiatedHash(n.orig, n.targs.list())
+ typMap = map[string]*Named{h: n}
+ }
+ }
+ u = n.check.subst(n.instance.pos, n.orig.underlying, makeSubstMap(n.TParams().list(), n.targs.list()), typMap)
+ } else {
+ u = Typ[Invalid]
+ }
+ n.underlying = u
+ n.fromRHS = u
+ n.instance = nil
+ }
+ return n
+}
+
+// safeUnderlying returns the underlying of typ without expanding instances, to
+// avoid infinite recursion.
+//
+// TODO(rfindley): eliminate this function or give it a better name.
+func safeUnderlying(typ Type) Type {
+ if t, _ := typ.(*Named); t != nil {
+ return t.load().underlying
+ }
+ return typ.Underlying()
+}
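
A minimal sketch of the eager construction path for Named types shown above, assuming code inside the types2 package; the package pointer and the name "T" are placeholders. The lazy path via a resolve callback is sketched after the object.go hunk below.

// Sketch: a defined type T with underlying struct{} and no methods.
func namedSketch(pkg *Package) *Named {
	obj := NewTypeName(nopos, pkg, "T", nil)     // type is filled in by NewNamed
	n := NewNamed(obj, NewStruct(nil, nil), nil) // underlying must not be a *Named
	// Type parameters and methods could be attached later via
	// n.SetTParams(...) and n.AddMethod(...).
	return n
}
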
diff --git a/src/cmd/compile/internal/types2/object.go b/src/cmd/compile/internal/types2/object.go
index 844bc34b6a..c1466f24ed 100644
--- a/src/cmd/compile/internal/types2/object.go
+++ b/src/cmd/compile/internal/types2/object.go
@@ -186,6 +186,45 @@ func (obj *object) sameId(pkg *Package, name string) bool {
return pkg.path == obj.pkg.path
}
+// less reports whether object a is ordered before object b.
+//
+// Objects are ordered nil before non-nil, exported before
+// non-exported, then by name, and finally (for non-exported
+// names) by package height and path.
+func (a *object) less(b *object) bool {
+ if a == b {
+ return false
+ }
+
+ // Nil before non-nil.
+ if a == nil {
+ return true
+ }
+ if b == nil {
+ return false
+ }
+
+ // Exported names before non-exported.
+ ea := isExported(a.name)
+ eb := isExported(b.name)
+ if ea != eb {
+ return ea
+ }
+
+ // Order by name and then (for non-exported names) by package.
+ if a.name != b.name {
+ return a.name < b.name
+ }
+ if !ea {
+ if a.pkg.height != b.pkg.height {
+ return a.pkg.height < b.pkg.height
+ }
+ return a.pkg.path < b.pkg.path
+ }
+
+ return false
+}
+
// A PkgName represents an imported Go package.
// PkgNames don't have a type.
type PkgName struct {
@@ -237,6 +276,14 @@ func NewTypeName(pos syntax.Pos, pkg *Package, name string, typ Type) *TypeName
return &TypeName{object{nil, pos, pkg, name, typ, 0, colorFor(typ), nopos}}
}
+// NewTypeNameLazy returns a new type name, like NewTypeName, but it
+// lazily calls resolve to finish constructing the corresponding Named type.
+func NewTypeNameLazy(pos syntax.Pos, pkg *Package, name string, resolve func(named *Named) (tparams []*TypeParam, underlying Type, methods []*Func)) *TypeName {
+ obj := NewTypeName(pos, pkg, name, nil)
+ NewNamed(obj, nil, nil).resolve = resolve
+ return obj
+}
+
// IsAlias reports whether obj is an alias name for a type.
func (obj *TypeName) IsAlias() bool {
switch t := obj.typ.(type) {
@@ -329,36 +376,6 @@ func (obj *Func) FullName() string {
// Scope returns the scope of the function's body block.
func (obj *Func) Scope() *Scope { return obj.typ.(*Signature).scope }
-// Less reports whether function a is ordered before function b.
-//
-// Functions are ordered exported before non-exported, then by name,
-// and finally (for non-exported functions) by package path.
-//
-// TODO(gri) The compiler also sorts by package height before package
-// path for non-exported names.
-func (a *Func) less(b *Func) bool {
- if a == b {
- return false
- }
-
- // Exported functions before non-exported.
- ea := isExported(a.name)
- eb := isExported(b.name)
- if ea != eb {
- return ea
- }
-
- // Order by name and then (for non-exported names) by package.
- if a.name != b.name {
- return a.name < b.name
- }
- if !ea {
- return a.pkg.path < b.pkg.path
- }
-
- return false
-}
-
func (*Func) isDependency() {} // a function may be a dependency of an initialization expression
// A Label represents a declared label.
@@ -458,6 +475,9 @@ func writeObject(buf *bytes.Buffer, obj Object, qf Qualifier) {
if _, ok := typ.(*Basic); ok {
return
}
+ if named, _ := typ.(*Named); named != nil && named.TParams().Len() > 0 {
+ writeTParamList(buf, named.TParams().list(), qf, nil)
+ }
if tname.IsAlias() {
buf.WriteString(" =")
} else {
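
A companion sketch for the new NewTypeNameLazy hook above, which defers producing the Named's type parameters, underlying type, and methods until the type is first loaded (placeholders as before):

// Sketch: the resolve callback runs at most once, when the Named is first loaded.
func lazyNamedSketch(pkg *Package) *TypeName {
	return NewTypeNameLazy(nopos, pkg, "T", func(*Named) ([]*TypeParam, Type, []*Func) {
		return nil, NewStruct(nil, nil), nil // no tparams, struct{} underlying, no methods
	})
}
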
diff --git a/src/cmd/compile/internal/types2/object_test.go b/src/cmd/compile/internal/types2/object_test.go
index 7f63c79332..a86733a5c9 100644
--- a/src/cmd/compile/internal/types2/object_test.go
+++ b/src/cmd/compile/internal/types2/object_test.go
@@ -25,7 +25,7 @@ func TestIsAlias(t *testing.T) {
check(Unsafe.Scope().Lookup("Pointer").(*TypeName), false)
for _, name := range Universe.Names() {
if obj, _ := Universe.Lookup(name).(*TypeName); obj != nil {
- check(obj, name == "byte" || name == "rune")
+ check(obj, name == "any" || name == "byte" || name == "rune")
}
}
diff --git a/src/cmd/compile/internal/types2/operand.go b/src/cmd/compile/internal/types2/operand.go
index 455d8b5dd1..19326b8342 100644
--- a/src/cmd/compile/internal/types2/operand.go
+++ b/src/cmd/compile/internal/types2/operand.go
@@ -176,16 +176,20 @@ func operandString(x *operand, qf Qualifier) string {
if hasType {
if x.typ != Typ[Invalid] {
var intro string
- switch {
- case isGeneric(x.typ):
- intro = " of generic type "
- case asTypeParam(x.typ) != nil:
- intro = " of type parameter type "
- default:
+ var tpar *TypeParam
+ if isGeneric(x.typ) {
+ intro = " of parameterized type "
+ } else if tpar = asTypeParam(x.typ); tpar != nil {
+ intro = " of type parameter "
+ } else {
intro = " of type "
}
buf.WriteString(intro)
WriteType(&buf, x.typ, qf)
+ if tpar != nil {
+ buf.WriteString(" constrained by ")
+ WriteType(&buf, tpar.bound, qf) // do not compute interface type sets here
+ }
} else {
buf.WriteString(" with invalid type")
}
@@ -248,20 +252,35 @@ func (x *operand) assignableTo(check *Checker, T Type, reason *string) (bool, er
V := x.typ
+ const debugAssignableTo = false
+ if debugAssignableTo && check != nil {
+ check.dump("V = %s", V)
+ check.dump("T = %s", T)
+ }
+
// x's type is identical to T
- if check.identical(V, T) {
+ if Identical(V, T) {
return true, 0
}
Vu := optype(V)
Tu := optype(T)
+ if debugAssignableTo && check != nil {
+ check.dump("Vu = %s", Vu)
+ check.dump("Tu = %s", Tu)
+ }
+
// x is an untyped value representable by a value of type T.
if isUntyped(Vu) {
- if t, ok := Tu.(*Sum); ok {
- return t.is(func(t Type) bool {
+ if t, ok := under(T).(*TypeParam); ok {
+ return t.is(func(t *term) bool {
// TODO(gri) this could probably be more efficient
- ok, _ := x.assignableTo(check, t, reason)
+ if t.tilde {
+ // TODO(gri) We need to check assignability
+ // for the underlying type of x.
+ }
+ ok, _ := x.assignableTo(check, t.typ, reason)
return ok
}), _IncompatibleAssign
}
@@ -272,7 +291,7 @@ func (x *operand) assignableTo(check *Checker, T Type, reason *string) (bool, er
// x's type V and T have identical underlying types
// and at least one of V or T is not a named type
- if check.identical(Vu, Tu) && (!isNamed(V) || !isNamed(T)) {
+ if Identical(Vu, Tu) && (!isNamed(V) || !isNamed(T)) {
return true, 0
}
@@ -281,7 +300,7 @@ func (x *operand) assignableTo(check *Checker, T Type, reason *string) (bool, er
if m, wrongType := check.missingMethod(V, Ti, true); m != nil /* Implements(V, Ti) */ {
if reason != nil {
if wrongType != nil {
- if check.identical(m.typ, wrongType.typ) {
+ if Identical(m.typ, wrongType.typ) {
*reason = fmt.Sprintf("missing method %s (%s has pointer receiver)", m.name, m.name)
} else {
*reason = fmt.Sprintf("wrong type for method %s (have %s, want %s)", m.Name(), wrongType.typ, m.typ)
@@ -300,7 +319,7 @@ func (x *operand) assignableTo(check *Checker, T Type, reason *string) (bool, er
// type, x's type V and T have identical element types,
// and at least one of V or T is not a named type
if Vc, ok := Vu.(*Chan); ok && Vc.dir == SendRecv {
- if Tc, ok := Tu.(*Chan); ok && check.identical(Vc.elem, Tc.elem) {
+ if Tc, ok := Tu.(*Chan); ok && Identical(Vc.elem, Tc.elem) {
return !isNamed(V) || !isNamed(T), _InvalidChanAssign
}
}
diff --git a/src/cmd/compile/internal/types2/package.go b/src/cmd/compile/internal/types2/package.go
index 31b1e71787..8044e7e6a7 100644
--- a/src/cmd/compile/internal/types2/package.go
+++ b/src/cmd/compile/internal/types2/package.go
@@ -13,8 +13,9 @@ type Package struct {
path string
name string
scope *Scope
- complete bool
imports []*Package
+ height int
+ complete bool
fake bool // scope lookup errors are silently dropped if package is fake (internal use only)
cgo bool // uses of this package will be rewritten into uses of declarations from _cgo_gotypes.go
}
@@ -22,8 +23,14 @@ type Package struct {
// NewPackage returns a new Package for the given package path and name.
// The package is not complete and contains no explicit imports.
func NewPackage(path, name string) *Package {
+ return NewPackageHeight(path, name, 0)
+}
+
+// NewPackageHeight is like NewPackage, but allows specifying the
+// package's height.
+func NewPackageHeight(path, name string, height int) *Package {
scope := NewScope(Universe, nopos, nopos, fmt.Sprintf("package %q", path))
- return &Package{path: path, name: name, scope: scope}
+ return &Package{path: path, name: name, scope: scope, height: height}
}
// Path returns the package path.
@@ -32,13 +39,22 @@ func (pkg *Package) Path() string { return pkg.path }
// Name returns the package name.
func (pkg *Package) Name() string { return pkg.name }
+// Height returns the package height.
+func (pkg *Package) Height() int { return pkg.height }
+
// SetName sets the package name.
func (pkg *Package) SetName(name string) { pkg.name = name }
// Scope returns the (complete or incomplete) package scope
// holding the objects declared at package level (TypeNames,
// Consts, Vars, and Funcs).
-func (pkg *Package) Scope() *Scope { return pkg.scope }
+// For a nil pkg receiver, Scope returns the Universe scope.
+func (pkg *Package) Scope() *Scope {
+ if pkg != nil {
+ return pkg.scope
+ }
+ return Universe
+}
// A package is complete if its scope contains (at least) all
// exported objects; otherwise it is incomplete.
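
A brief sketch of the package-height additions and the new nil-receiver behavior of Scope (values are placeholders):

// Sketch: heights order packages, e.g. when sorting non-exported names in object.less.
func packageSketch() {
	p := NewPackageHeight("example.com/m/a", "a", 2) // path, name, and height
	_ = p.Height()                                   // 2
	_ = (*Package)(nil).Scope()                      // Universe, per the new nil-receiver rule
}
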
diff --git a/src/cmd/compile/internal/types2/pointer.go b/src/cmd/compile/internal/types2/pointer.go
new file mode 100644
index 0000000000..63055fc6b0
--- /dev/null
+++ b/src/cmd/compile/internal/types2/pointer.go
@@ -0,0 +1,19 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+// A Pointer represents a pointer type.
+type Pointer struct {
+ base Type // element type
+}
+
+// NewPointer returns a new pointer type for the given element (base) type.
+func NewPointer(elem Type) *Pointer { return &Pointer{base: elem} }
+
+// Elem returns the element type for the given pointer p.
+func (p *Pointer) Elem() Type { return p.base }
+
+func (p *Pointer) Underlying() Type { return p }
+func (p *Pointer) String() string { return TypeString(p, nil) }
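
And an equally tiny sketch for the Pointer type moved into its own file:

// Sketch of the Pointer API.
func pointerSketch() {
	p := NewPointer(Typ[Int]) // *int
	_ = p.Elem()              // int
}
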
diff --git a/src/cmd/compile/internal/types2/predicates.go b/src/cmd/compile/internal/types2/predicates.go
index ae186a0b5d..3ccafef990 100644
--- a/src/cmd/compile/internal/types2/predicates.go
+++ b/src/cmd/compile/internal/types2/predicates.go
@@ -10,7 +10,7 @@ package types2
// isNamed may be called with types that are not fully set up.
func isNamed(typ Type) bool {
switch typ.(type) {
- case *Basic, *Named, *TypeParam, *instance:
+ case *Basic, *Named, *TypeParam:
return true
}
return false
@@ -21,15 +21,15 @@ func isNamed(typ Type) bool {
func isGeneric(typ Type) bool {
// A parameterized type is only instantiated if it doesn't have an instantiation already.
named, _ := typ.(*Named)
- return named != nil && named.obj != nil && named.tparams != nil && named.targs == nil
+ return named != nil && named.obj != nil && named.targs == nil && named.TParams() != nil
}
func is(typ Type, what BasicInfo) bool {
- switch t := optype(typ).(type) {
+ switch t := under(typ).(type) {
case *Basic:
return t.info&what != 0
- case *Sum:
- return t.is(func(typ Type) bool { return is(typ, what) })
+ case *TypeParam:
+ return t.underIs(func(t Type) bool { return is(t, what) })
}
return false
}
@@ -56,10 +56,7 @@ func isNumericOrString(typ Type) bool { return is(typ, IsNumeric|IsString) }
// are not fully set up.
func isTyped(typ Type) bool {
// isTyped is called with types that are not fully
- // set up. Must not call Basic()!
- // A *Named or *instance type is always typed, so
- // we only need to check if we have a true *Basic
- // type.
+ // set up. Must not call asBasic()!
t, _ := typ.(*Basic)
return t == nil || t.info&IsUntyped == 0
}
@@ -96,19 +93,7 @@ func comparable(T Type, seen map[Type]bool) bool {
}
seen[T] = true
- // If T is a type parameter not constrained by any type
- // list (i.e., it's underlying type is the top type),
- // T is comparable if it has the == method. Otherwise,
- // the underlying type "wins". For instance
- //
- // interface{ comparable; type []byte }
- //
- // is not comparable because []byte is not comparable.
- if t := asTypeParam(T); t != nil && optype(t) == theTop {
- return t.Bound().IsComparable()
- }
-
- switch t := optype(T).(type) {
+ switch t := under(T).(type) {
case *Basic:
// assume invalid types to be comparable
// to avoid follow-up errors
@@ -124,42 +109,25 @@ func comparable(T Type, seen map[Type]bool) bool {
return true
case *Array:
return comparable(t.elem, seen)
- case *Sum:
- pred := func(t Type) bool {
- return comparable(t, seen)
- }
- return t.is(pred)
case *TypeParam:
- return t.Bound().IsComparable()
+ return t.iface().IsComparable()
}
return false
}
// hasNil reports whether a type includes the nil value.
func hasNil(typ Type) bool {
- switch t := optype(typ).(type) {
+ switch t := under(typ).(type) {
case *Basic:
return t.kind == UnsafePointer
case *Slice, *Pointer, *Signature, *Interface, *Map, *Chan:
return true
- case *Sum:
- return t.is(hasNil)
+ case *TypeParam:
+ return t.underIs(hasNil)
}
return false
}
-// identical reports whether x and y are identical types.
-// Receivers of Signature types are ignored.
-func (check *Checker) identical(x, y Type) bool {
- return check.identical0(x, y, true, nil)
-}
-
-// identicalIgnoreTags reports whether x and y are identical types if tags are ignored.
-// Receivers of Signature types are ignored.
-func (check *Checker) identicalIgnoreTags(x, y Type) bool {
- return check.identical0(x, y, false, nil)
-}
-
// An ifacePair is a node in a stack of interface type pairs compared for identity.
type ifacePair struct {
x, y *Interface
@@ -171,11 +139,7 @@ func (p *ifacePair) identical(q *ifacePair) bool {
}
// For changes to this code the corresponding changes should be made to unifier.nify.
-func (check *Checker) identical0(x, y Type, cmpTags bool, p *ifacePair) bool {
- // types must be expanded for comparison
- x = expandf(x)
- y = expandf(y)
-
+func identical(x, y Type, cmpTags bool, p *ifacePair) bool {
if x == y {
return true
}
@@ -195,13 +159,13 @@ func (check *Checker) identical0(x, y Type, cmpTags bool, p *ifacePair) bool {
if y, ok := y.(*Array); ok {
// If one or both array lengths are unknown (< 0) due to some error,
// assume they are the same to avoid spurious follow-on errors.
- return (x.len < 0 || y.len < 0 || x.len == y.len) && check.identical0(x.elem, y.elem, cmpTags, p)
+ return (x.len < 0 || y.len < 0 || x.len == y.len) && identical(x.elem, y.elem, cmpTags, p)
}
case *Slice:
// Two slice types are identical if they have identical element types.
if y, ok := y.(*Slice); ok {
- return check.identical0(x.elem, y.elem, cmpTags, p)
+ return identical(x.elem, y.elem, cmpTags, p)
}
case *Struct:
@@ -216,7 +180,7 @@ func (check *Checker) identical0(x, y Type, cmpTags bool, p *ifacePair) bool {
if f.embedded != g.embedded ||
cmpTags && x.Tag(i) != y.Tag(i) ||
!f.sameId(g.pkg, g.name) ||
- !check.identical0(f.typ, g.typ, cmpTags, p) {
+ !identical(f.typ, g.typ, cmpTags, p) {
return false
}
}
@@ -227,7 +191,7 @@ func (check *Checker) identical0(x, y Type, cmpTags bool, p *ifacePair) bool {
case *Pointer:
// Two pointer types are identical if they have identical base types.
if y, ok := y.(*Pointer); ok {
- return check.identical0(x.base, y.base, cmpTags, p)
+ return identical(x.base, y.base, cmpTags, p)
}
case *Tuple:
@@ -238,7 +202,7 @@ func (check *Checker) identical0(x, y Type, cmpTags bool, p *ifacePair) bool {
if x != nil {
for i, v := range x.vars {
w := y.vars[i]
- if !check.identical0(v.typ, w.typ, cmpTags, p) {
+ if !identical(v.typ, w.typ, cmpTags, p) {
return false
}
}
@@ -256,49 +220,27 @@ func (check *Checker) identical0(x, y Type, cmpTags bool, p *ifacePair) bool {
// parameter names.
if y, ok := y.(*Signature); ok {
return x.variadic == y.variadic &&
- check.identicalTParams(x.tparams, y.tparams, cmpTags, p) &&
- check.identical0(x.params, y.params, cmpTags, p) &&
- check.identical0(x.results, y.results, cmpTags, p)
- }
-
- case *Sum:
- // Two sum types are identical if they contain the same types.
- // (Sum types always consist of at least two types. Also, the
- // the set (list) of types in a sum type consists of unique
- // types - each type appears exactly once. Thus, two sum types
- // must contain the same number of types to have chance of
- // being equal.
- if y, ok := y.(*Sum); ok && len(x.types) == len(y.types) {
- // Every type in x.types must be in y.types.
- // Quadratic algorithm, but probably good enough for now.
- // TODO(gri) we need a fast quick type ID/hash for all types.
- L:
- for _, x := range x.types {
- for _, y := range y.types {
- if Identical(x, y) {
- continue L // x is in y.types
- }
- }
- return false // x is not in y.types
- }
- return true
+ identicalTParams(x.TParams().list(), y.TParams().list(), cmpTags, p) &&
+ identical(x.params, y.params, cmpTags, p) &&
+ identical(x.results, y.results, cmpTags, p)
}
case *Interface:
+ // Two interface types are identical if they describe the same type sets.
+ // With the existing implementation restriction, this simplifies to:
+ //
// Two interface types are identical if they have the same set of methods with
- // the same names and identical function types. Lower-case method names from
- // different packages are always different. The order of the methods is irrelevant.
+ // the same names and identical function types, and if any type restrictions
+ // are the same. Lower-case method names from different packages are always
+ // different. The order of the methods is irrelevant.
if y, ok := y.(*Interface); ok {
- // If identical0 is called (indirectly) via an external API entry point
- // (such as Identical, IdenticalIgnoreTags, etc.), check is nil. But in
- // that case, interfaces are expected to be complete and lazy completion
- // here is not needed.
- if check != nil {
- check.completeInterface(nopos, x)
- check.completeInterface(nopos, y)
+ xset := x.typeSet()
+ yset := y.typeSet()
+ if !xset.terms.equal(yset.terms) {
+ return false
}
- a := x.allMethods
- b := y.allMethods
+ a := xset.methods
+ b := yset.methods
if len(a) == len(b) {
// Interface types are the only types where cycles can occur
// that are not "terminated" via named types; and such cycles
@@ -335,7 +277,7 @@ func (check *Checker) identical0(x, y Type, cmpTags bool, p *ifacePair) bool {
}
for i, f := range a {
g := b[i]
- if f.Id() != g.Id() || !check.identical0(f.typ, g.typ, cmpTags, q) {
+ if f.Id() != g.Id() || !identical(f.typ, g.typ, cmpTags, q) {
return false
}
}
@@ -346,20 +288,44 @@ func (check *Checker) identical0(x, y Type, cmpTags bool, p *ifacePair) bool {
case *Map:
// Two map types are identical if they have identical key and value types.
if y, ok := y.(*Map); ok {
- return check.identical0(x.key, y.key, cmpTags, p) && check.identical0(x.elem, y.elem, cmpTags, p)
+ return identical(x.key, y.key, cmpTags, p) && identical(x.elem, y.elem, cmpTags, p)
}
case *Chan:
// Two channel types are identical if they have identical value types
// and the same direction.
if y, ok := y.(*Chan); ok {
- return x.dir == y.dir && check.identical0(x.elem, y.elem, cmpTags, p)
+ return x.dir == y.dir && identical(x.elem, y.elem, cmpTags, p)
}
case *Named:
// Two named types are identical if their type names originate
// in the same type declaration.
if y, ok := y.(*Named); ok {
+ x.expand(nil)
+ y.expand(nil)
+
+ xargs := x.TArgs().list()
+ yargs := y.TArgs().list()
+
+ if len(xargs) != len(yargs) {
+ return false
+ }
+
+ if len(xargs) > 0 {
+ // Instances are identical if their original type and type arguments
+ // are identical.
+ if !Identical(x.orig, y.orig) {
+ return false
+ }
+ for i, xa := range xargs {
+ if !Identical(xa, yargs[i]) {
+ return false
+ }
+ }
+ return true
+ }
+
// TODO(gri) Why is x == y not sufficient? And if it is,
// we can just return false here because x == y
// is caught in the very beginning of this function.
@@ -369,13 +335,9 @@ func (check *Checker) identical0(x, y Type, cmpTags bool, p *ifacePair) bool {
case *TypeParam:
// nothing to do (x and y being equal is caught in the very beginning of this function)
- // case *instance:
- // unreachable since types are expanded
-
- case *bottom, *top:
- // Either both types are theBottom, or both are theTop in which
- // case the initial x == y check will have caught them. Otherwise
- // they are not identical.
+ case *top:
+ // Either both types are theTop in which case the initial x == y check
+ // will have caught them. Otherwise they are not identical.
case nil:
// avoid a crash in case of nil type
@@ -387,13 +349,13 @@ func (check *Checker) identical0(x, y Type, cmpTags bool, p *ifacePair) bool {
return false
}
-func (check *Checker) identicalTParams(x, y []*TypeName, cmpTags bool, p *ifacePair) bool {
+func identicalTParams(x, y []*TypeParam, cmpTags bool, p *ifacePair) bool {
if len(x) != len(y) {
return false
}
for i, x := range x {
y := y[i]
- if !check.identical0(x.typ.(*TypeParam).bound, y.typ.(*TypeParam).bound, cmpTags, p) {
+ if !identical(x.bound, y.bound, cmpTags, p) {
return false
}
}
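
The predicates above now dispatch on under(typ) and, for a type parameter, ask whether a property holds for every type in the parameter's type set (underIs). A rough, self-contained sketch of that shape — the kind/term/typeParam names below are invented for illustration and are not the types2 API:

    package main

    import "fmt"

    type kind int

    const (
        kindInt kind = iota
        kindString
        kindSlice
        kindChan
    )

    // term stands in for one underlying type admitted by a constraint.
    type term struct{ kind kind }

    // typeParam models a constrained type parameter as the set of terms
    // (underlying types) allowed by its constraint.
    type typeParam struct{ terms []term }

    // underIs reports whether f holds for every term in the type set,
    // mirroring the "check the predicate against all underlying types"
    // shape used by hasNil and is above.
    func (t *typeParam) underIs(f func(term) bool) bool {
        if len(t.terms) == 0 {
            return false // empty type set: no type satisfies anything
        }
        for _, tm := range t.terms {
            if !f(tm) {
                return false
            }
        }
        return true
    }

    // hasNil reports whether every type in the set has a nil value.
    func hasNil(t *typeParam) bool {
        return t.underIs(func(tm term) bool {
            return tm.kind == kindSlice || tm.kind == kindChan
        })
    }

    func main() {
        p := &typeParam{terms: []term{{kindSlice}, {kindChan}}}
        q := &typeParam{terms: []term{{kindSlice}, {kindInt}}}
        fmt.Println(hasNil(p)) // true: slices and channels can be nil
        fmt.Println(hasNil(q)) // false: int has no nil value
    }
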
diff --git a/src/cmd/compile/internal/types2/resolver.go b/src/cmd/compile/internal/types2/resolver.go
index fa30650bd4..018a20cfb2 100644
--- a/src/cmd/compile/internal/types2/resolver.go
+++ b/src/cmd/compile/internal/types2/resolver.go
@@ -196,6 +196,7 @@ func (check *Checker) importPackage(pos syntax.Pos, path, dir string) *Package {
// methods with receiver base type names.
func (check *Checker) collectObjects() {
pkg := check.pkg
+ pkg.height = 0
// pkgImports is the set of packages already imported by any package file seen
// so far. Used to avoid duplicate entries in pkg.imports. Allocate and populate
@@ -253,6 +254,15 @@ func (check *Checker) collectObjects() {
continue
}
+ if imp == Unsafe {
+ // typecheck ignores imports of package unsafe for
+ // calculating height.
+ // TODO(mdempsky): Revisit this. This seems fine, but I
+ // don't remember explicitly considering this case.
+ } else if h := imp.height + 1; h > pkg.height {
+ pkg.height = h
+ }
+
// local name overrides imported package name
name := imp.name
if s.LocalPkgName != nil {
@@ -265,7 +275,7 @@ func (check *Checker) collectObjects() {
}
if name == "init" {
- check.error(s.LocalPkgName, "cannot import package as init - init must be a func")
+ check.error(s, "cannot import package as init - init must be a func")
continue
}
@@ -298,22 +308,26 @@ func (check *Checker) collectObjects() {
check.dotImportMap = make(map[dotImportKey]*PkgName)
}
// merge imported scope with file scope
- for _, obj := range imp.scope.elems {
+ for name, obj := range imp.scope.elems {
+ // Note: Avoid eager resolve(name, obj) here, so we only
+ // resolve dot-imported objects as needed.
+
// A package scope may contain non-exported objects,
// do not import them!
- if obj.Exported() {
+ if isExported(name) {
// declare dot-imported object
// (Do not use check.declare because it modifies the object
// via Object.setScopePos, which leads to a race condition;
// the object may be imported into more than one file scope
// concurrently. See issue #32154.)
- if alt := fileScope.Insert(obj); alt != nil {
+ if alt := fileScope.Lookup(name); alt != nil {
var err error_
- err.errorf(s.LocalPkgName, "%s redeclared in this block", obj.Name())
+ err.errorf(s.LocalPkgName, "%s redeclared in this block", alt.Name())
err.recordAltDecl(alt)
check.report(&err)
} else {
- check.dotImportMap[dotImportKey{fileScope, obj}] = pkgName
+ fileScope.insert(name, obj)
+ check.dotImportMap[dotImportKey{fileScope, name}] = pkgName
}
}
}
@@ -459,8 +473,9 @@ func (check *Checker) collectObjects() {
// verify that objects in package and file scopes have different names
for _, scope := range fileScopes {
- for _, obj := range scope.elems {
- if alt := pkg.scope.Lookup(obj.Name()); alt != nil {
+ for name, obj := range scope.elems {
+ if alt := pkg.scope.Lookup(name); alt != nil {
+ obj = resolve(name, obj)
var err error_
if pkg, ok := obj.(*PkgName); ok {
err.errorf(alt, "%s already declared through import of %s", alt.Name(), pkg.Imported())
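
The resolver hunk above records a package "height": one more than the tallest imported package, with package unsafe excluded. A small sketch of that rule, assuming heights are computed bottom-up (imports before importers); the pkg struct here is illustrative, not the types2 Package:

    package main

    import "fmt"

    // pkg carries only the fields needed to illustrate the height rule.
    type pkg struct {
        name    string
        unsafe_ bool
        imports []*pkg
        height  int
    }

    // computeHeight mirrors the rule added to collectObjects above: a
    // package's height is one more than the maximum height of its
    // imports, and package unsafe is ignored.
    func computeHeight(p *pkg) {
        p.height = 0
        for _, imp := range p.imports {
            if imp.unsafe_ {
                continue // unsafe does not contribute to height
            }
            if h := imp.height + 1; h > p.height {
                p.height = h
            }
        }
    }

    func main() {
        leaf := &pkg{name: "strconv"} // no imports: height 0
        mid := &pkg{name: "encoding/json", imports: []*pkg{leaf}}
        computeHeight(mid) // height 1
        top := &pkg{name: "main", imports: []*pkg{mid, {name: "unsafe", unsafe_: true}}}
        computeHeight(top) // height 2 (unsafe ignored)
        fmt.Println(mid.height, top.height)
    }
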
diff --git a/src/cmd/compile/internal/types2/resolver_test.go b/src/cmd/compile/internal/types2/resolver_test.go
index aee435ff5f..a02abce081 100644
--- a/src/cmd/compile/internal/types2/resolver_test.go
+++ b/src/cmd/compile/internal/types2/resolver_test.go
@@ -143,7 +143,7 @@ func TestResolveIdents(t *testing.T) {
// check that qualified identifiers are resolved
for _, f := range files {
- syntax.Walk(f, func(n syntax.Node) bool {
+ syntax.Crawl(f, func(n syntax.Node) bool {
if s, ok := n.(*syntax.SelectorExpr); ok {
if x, ok := s.X.(*syntax.Name); ok {
obj := uses[x]
@@ -177,7 +177,7 @@ func TestResolveIdents(t *testing.T) {
foundDefs := make(map[*syntax.Name]bool)
var both []string
for _, f := range files {
- syntax.Walk(f, func(n syntax.Node) bool {
+ syntax.Crawl(f, func(n syntax.Node) bool {
if x, ok := n.(*syntax.Name); ok {
var objects int
if _, found := uses[x]; found {
diff --git a/src/cmd/compile/internal/types2/sanitize.go b/src/cmd/compile/internal/types2/sanitize.go
deleted file mode 100644
index 64a2dedc7d..0000000000
--- a/src/cmd/compile/internal/types2/sanitize.go
+++ /dev/null
@@ -1,202 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package types2
-
-// sanitizeInfo walks the types contained in info to ensure that all instances
-// are expanded.
-//
-// This includes some objects that may be shared across concurrent
-// type-checking passes (such as those in the universe scope), so we are
-// careful here not to write types that are already sanitized. This avoids a
-// data race as any shared types should already be sanitized.
-func sanitizeInfo(info *Info) {
- var s sanitizer = make(map[Type]Type)
-
- // Note: Some map entries are not references.
- // If modified, they must be assigned back.
-
- for e, tv := range info.Types {
- if typ := s.typ(tv.Type); typ != tv.Type {
- tv.Type = typ
- info.Types[e] = tv
- }
- }
-
- for e, inf := range info.Inferred {
- changed := false
- for i, targ := range inf.Targs {
- if typ := s.typ(targ); typ != targ {
- inf.Targs[i] = typ
- changed = true
- }
- }
- if typ := s.typ(inf.Sig); typ != inf.Sig {
- inf.Sig = typ.(*Signature)
- changed = true
- }
- if changed {
- info.Inferred[e] = inf
- }
- }
-
- for _, obj := range info.Defs {
- if obj != nil {
- if typ := s.typ(obj.Type()); typ != obj.Type() {
- obj.setType(typ)
- }
- }
- }
-
- for _, obj := range info.Uses {
- if obj != nil {
- if typ := s.typ(obj.Type()); typ != obj.Type() {
- obj.setType(typ)
- }
- }
- }
-
- // TODO(gri) sanitize as needed
- // - info.Implicits
- // - info.Selections
- // - info.Scopes
- // - info.InitOrder
-}
-
-type sanitizer map[Type]Type
-
-func (s sanitizer) typ(typ Type) Type {
- if typ == nil {
- return nil
- }
-
- if t, found := s[typ]; found {
- return t
- }
- s[typ] = typ
-
- switch t := typ.(type) {
- case *Basic, *bottom, *top:
- // nothing to do
-
- case *Array:
- if elem := s.typ(t.elem); elem != t.elem {
- t.elem = elem
- }
-
- case *Slice:
- if elem := s.typ(t.elem); elem != t.elem {
- t.elem = elem
- }
-
- case *Struct:
- s.varList(t.fields)
-
- case *Pointer:
- if base := s.typ(t.base); base != t.base {
- t.base = base
- }
-
- case *Tuple:
- s.tuple(t)
-
- case *Signature:
- s.var_(t.recv)
- s.tuple(t.params)
- s.tuple(t.results)
-
- case *Sum:
- s.typeList(t.types)
-
- case *Interface:
- s.funcList(t.methods)
- if types := s.typ(t.types); types != t.types {
- t.types = types
- }
- s.typeList(t.embeddeds)
- s.funcList(t.allMethods)
- if allTypes := s.typ(t.allTypes); allTypes != t.allTypes {
- t.allTypes = allTypes
- }
-
- case *Map:
- if key := s.typ(t.key); key != t.key {
- t.key = key
- }
- if elem := s.typ(t.elem); elem != t.elem {
- t.elem = elem
- }
-
- case *Chan:
- if elem := s.typ(t.elem); elem != t.elem {
- t.elem = elem
- }
-
- case *Named:
- if orig := s.typ(t.fromRHS); orig != t.fromRHS {
- t.fromRHS = orig
- }
- if under := s.typ(t.underlying); under != t.underlying {
- t.underlying = under
- }
- s.typeList(t.targs)
- s.funcList(t.methods)
-
- case *TypeParam:
- if bound := s.typ(t.bound); bound != t.bound {
- t.bound = bound
- }
-
- case *instance:
- typ = t.expand()
- s[t] = typ
-
- default:
- panic("unimplemented")
- }
-
- return typ
-}
-
-func (s sanitizer) var_(v *Var) {
- if v != nil {
- if typ := s.typ(v.typ); typ != v.typ {
- v.typ = typ
- }
- }
-}
-
-func (s sanitizer) varList(list []*Var) {
- for _, v := range list {
- s.var_(v)
- }
-}
-
-func (s sanitizer) tuple(t *Tuple) {
- if t != nil {
- s.varList(t.vars)
- }
-}
-
-func (s sanitizer) func_(f *Func) {
- if f != nil {
- if typ := s.typ(f.typ); typ != f.typ {
- f.typ = typ
- }
- }
-}
-
-func (s sanitizer) funcList(list []*Func) {
- for _, f := range list {
- s.func_(f)
- }
-}
-
-func (s sanitizer) typeList(list []Type) {
- for i, t := range list {
- if typ := s.typ(t); typ != t {
- list[i] = typ
- }
- }
-}
diff --git a/src/cmd/compile/internal/types2/scope.go b/src/cmd/compile/internal/types2/scope.go
index ade0a79b31..095875d94b 100644
--- a/src/cmd/compile/internal/types2/scope.go
+++ b/src/cmd/compile/internal/types2/scope.go
@@ -13,6 +13,7 @@ import (
"io"
"sort"
"strings"
+ "sync"
)
// A Scope maintains a set of objects and links to its containing
@@ -22,6 +23,7 @@ import (
type Scope struct {
parent *Scope
children []*Scope
+ number int // parent.children[number-1] is this scope; 0 if there is no parent
elems map[string]Object // lazily allocated
pos, end syntax.Pos // scope extent; may be invalid
comment string // for debugging only
@@ -31,10 +33,11 @@ type Scope struct {
// NewScope returns a new, empty scope contained in the given parent
// scope, if any. The comment is for debugging only.
func NewScope(parent *Scope, pos, end syntax.Pos, comment string) *Scope {
- s := &Scope{parent, nil, nil, pos, end, comment, false}
+ s := &Scope{parent, nil, 0, nil, pos, end, comment, false}
// don't add children to Universe scope!
if parent != nil && parent != Universe {
parent.children = append(parent.children, s)
+ s.number = len(parent.children)
}
return s
}
@@ -66,7 +69,7 @@ func (s *Scope) Child(i int) *Scope { return s.children[i] }
// Lookup returns the object in scope s with the given name if such an
// object exists; otherwise the result is nil.
func (s *Scope) Lookup(name string) Object {
- return s.elems[name]
+ return resolve(name, s.elems[name])
}
// LookupParent follows the parent chain of scopes starting with s until
@@ -81,7 +84,7 @@ func (s *Scope) Lookup(name string) Object {
// whose scope is the scope of the package that exported them.
func (s *Scope) LookupParent(name string, pos syntax.Pos) (*Scope, Object) {
for ; s != nil; s = s.parent {
- if obj := s.elems[name]; obj != nil && (!pos.IsKnown() || obj.scopePos().Cmp(pos) <= 0) {
+ if obj := s.Lookup(name); obj != nil && (!pos.IsKnown() || obj.scopePos().Cmp(pos) <= 0) {
return s, obj
}
}
@@ -95,19 +98,38 @@ func (s *Scope) LookupParent(name string, pos syntax.Pos) (*Scope, Object) {
// if not already set, and returns nil.
func (s *Scope) Insert(obj Object) Object {
name := obj.Name()
- if alt := s.elems[name]; alt != nil {
+ if alt := s.Lookup(name); alt != nil {
return alt
}
- if s.elems == nil {
- s.elems = make(map[string]Object)
- }
- s.elems[name] = obj
+ s.insert(name, obj)
if obj.Parent() == nil {
obj.setParent(s)
}
return nil
}
+// InsertLazy is like Insert, but allows deferring construction of the
+// inserted object until it's accessed with Lookup. The Object
+// returned by resolve must have the same name as given to InsertLazy.
+// If s already contains an alternative object with the same name,
+// InsertLazy leaves s unchanged and returns false. Otherwise it
+// records the binding and returns true. The object's parent scope
+// will be set to s after resolve is called.
+func (s *Scope) InsertLazy(name string, resolve func() Object) bool {
+ if s.elems[name] != nil {
+ return false
+ }
+ s.insert(name, &lazyObject{parent: s, resolve: resolve})
+ return true
+}
+
+func (s *Scope) insert(name string, obj Object) {
+ if s.elems == nil {
+ s.elems = make(map[string]Object)
+ }
+ s.elems[name] = obj
+}
+
// Squash merges s with its parent scope p by adding all
// objects of s to p, adding all children of s to the
// children of p, and removing s from p's children.
@@ -117,7 +139,8 @@ func (s *Scope) Insert(obj Object) Object {
func (s *Scope) Squash(err func(obj, alt Object)) {
p := s.parent
assert(p != nil)
- for _, obj := range s.elems {
+ for name, obj := range s.elems {
+ obj = resolve(name, obj)
obj.setParent(nil)
if alt := p.Insert(obj); alt != nil {
err(obj, alt)
@@ -196,7 +219,7 @@ func (s *Scope) WriteTo(w io.Writer, n int, recurse bool) {
indn1 := indn + ind
for _, name := range s.Names() {
- fmt.Fprintf(w, "%s%s\n", indn1, s.elems[name])
+ fmt.Fprintf(w, "%s%s\n", indn1, s.Lookup(name))
}
if recurse {
@@ -214,3 +237,57 @@ func (s *Scope) String() string {
s.WriteTo(&buf, 0, false)
return buf.String()
}
+
+// A lazyObject represents an imported Object that has not been fully
+// resolved yet by its importer.
+type lazyObject struct {
+ parent *Scope
+ resolve func() Object
+ obj Object
+ once sync.Once
+}
+
+// resolve returns the Object represented by obj, resolving lazy
+// objects as appropriate.
+func resolve(name string, obj Object) Object {
+ if lazy, ok := obj.(*lazyObject); ok {
+ lazy.once.Do(func() {
+ obj := lazy.resolve()
+
+ if _, ok := obj.(*lazyObject); ok {
+ panic("recursive lazy object")
+ }
+ if obj.Name() != name {
+ panic("lazy object has unexpected name")
+ }
+
+ if obj.Parent() == nil {
+ obj.setParent(lazy.parent)
+ }
+ lazy.obj = obj
+ })
+
+ obj = lazy.obj
+ }
+ return obj
+}
+
+// stub implementations so *lazyObject implements Object and we can
+// store them directly into Scope.elems.
+func (*lazyObject) Parent() *Scope { panic("unreachable") }
+func (*lazyObject) Pos() syntax.Pos { panic("unreachable") }
+func (*lazyObject) Pkg() *Package { panic("unreachable") }
+func (*lazyObject) Name() string { panic("unreachable") }
+func (*lazyObject) Type() Type { panic("unreachable") }
+func (*lazyObject) Exported() bool { panic("unreachable") }
+func (*lazyObject) Id() string { panic("unreachable") }
+func (*lazyObject) String() string { panic("unreachable") }
+func (*lazyObject) order() uint32 { panic("unreachable") }
+func (*lazyObject) color() color { panic("unreachable") }
+func (*lazyObject) setType(Type) { panic("unreachable") }
+func (*lazyObject) setOrder(uint32) { panic("unreachable") }
+func (*lazyObject) setColor(color color) { panic("unreachable") }
+func (*lazyObject) setParent(*Scope) { panic("unreachable") }
+func (*lazyObject) sameId(pkg *Package, name string) bool { panic("unreachable") }
+func (*lazyObject) scopePos() syntax.Pos { panic("unreachable") }
+func (*lazyObject) setScopePos(pos syntax.Pos) { panic("unreachable") }
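
The lazyObject/resolve pair above defers construction of imported objects until they are first looked up, guarded by sync.Once so that concurrent lookups resolve the object exactly once. A minimal sketch of the same pattern, with invented names:

    package main

    import (
        "fmt"
        "sync"
    )

    // lazyValue defers construction of a value until first use, the same
    // sync.Once pattern resolve applies to scope entries above.
    type lazyValue struct {
        once    sync.Once
        resolve func() string // builds the value on first access
        val     string
    }

    // get resolves the value exactly once, even with concurrent callers.
    func (l *lazyValue) get() string {
        l.once.Do(func() {
            l.val = l.resolve()
        })
        return l.val
    }

    func main() {
        calls := 0
        v := &lazyValue{resolve: func() string {
            calls++ // expensive construction runs at most once
            return "resolved"
        }}
        fmt.Println(v.get(), v.get(), calls) // resolved resolved 1
    }
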
diff --git a/src/cmd/compile/internal/types2/signature.go b/src/cmd/compile/internal/types2/signature.go
new file mode 100644
index 0000000000..f1bf60ae8e
--- /dev/null
+++ b/src/cmd/compile/internal/types2/signature.go
@@ -0,0 +1,393 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+import (
+ "cmd/compile/internal/syntax"
+ "fmt"
+)
+
+// ----------------------------------------------------------------------------
+// API
+
+// A Signature represents a (non-builtin) function or method type.
+// The receiver is ignored when comparing signatures for identity.
+type Signature struct {
+ // We need to keep the scope in Signature (rather than passing it around
+ // and store it in the Func Object) because when type-checking a function
+ // literal we call the general type checker which returns a general Type.
+ // We then unpack the *Signature and use the scope for the literal body.
+ rparams *TParamList // receiver type parameters from left to right, or nil
+ tparams *TParamList // type parameters from left to right, or nil
+ scope *Scope // function scope, present for package-local signatures
+ recv *Var // nil if not a method
+ params *Tuple // (incoming) parameters from left to right; or nil
+ results *Tuple // (outgoing) results from left to right; or nil
+ variadic bool // true if the last parameter's type is of the form ...T (or string, for append built-in only)
+}
+
+// NewSignature returns a new function type for the given receiver, parameters,
+// and results, either of which may be nil. If variadic is set, the function
+// is variadic, it must have at least one parameter, and the last parameter
+// must be of unnamed slice type.
+func NewSignature(recv *Var, params, results *Tuple, variadic bool) *Signature {
+ if variadic {
+ n := params.Len()
+ if n == 0 {
+ panic("variadic function must have at least one parameter")
+ }
+ if _, ok := params.At(n - 1).typ.(*Slice); !ok {
+ panic("variadic parameter must be of unnamed slice type")
+ }
+ }
+ return &Signature{recv: recv, params: params, results: results, variadic: variadic}
+}
+
+// Recv returns the receiver of signature s (if a method), or nil if a
+// function. It is ignored when comparing signatures for identity.
+//
+// For an abstract method, Recv returns the enclosing interface either
+// as a *Named or an *Interface. Due to embedding, an interface may
+// contain methods whose receiver type is a different interface.
+func (s *Signature) Recv() *Var { return s.recv }
+
+// TParams returns the type parameters of signature s, or nil.
+func (s *Signature) TParams() *TParamList { return s.tparams }
+
+// SetTParams sets the type parameters of signature s.
+func (s *Signature) SetTParams(tparams []*TypeParam) { s.tparams = bindTParams(tparams) }
+
+// RParams returns the receiver type parameters of signature s, or nil.
+func (s *Signature) RParams() *TParamList { return s.rparams }
+
+// SetRParams sets the receiver type params of signature s.
+func (s *Signature) SetRParams(rparams []*TypeParam) { s.rparams = bindTParams(rparams) }
+
+// Params returns the parameters of signature s, or nil.
+func (s *Signature) Params() *Tuple { return s.params }
+
+// Results returns the results of signature s, or nil.
+func (s *Signature) Results() *Tuple { return s.results }
+
+// Variadic reports whether the signature s is variadic.
+func (s *Signature) Variadic() bool { return s.variadic }
+
+func (s *Signature) Underlying() Type { return s }
+func (s *Signature) String() string { return TypeString(s, nil) }
+
+// ----------------------------------------------------------------------------
+// Implementation
+
+// Disabled by default, but enabled when running tests (via types_test.go).
+var acceptMethodTypeParams bool
+
+// funcType type-checks a function or method type.
+func (check *Checker) funcType(sig *Signature, recvPar *syntax.Field, tparams []*syntax.Field, ftyp *syntax.FuncType) {
+ check.openScope(ftyp, "function")
+ check.scope.isFunc = true
+ check.recordScope(ftyp, check.scope)
+ sig.scope = check.scope
+ defer check.closeScope()
+
+ var recvTyp syntax.Expr // rewritten receiver type; valid if != nil
+ if recvPar != nil {
+ // collect generic receiver type parameters, if any
+ // - a receiver type parameter is like any other type parameter, except that it is declared implicitly
+ // - the receiver specification acts as local declaration for its type parameters, which may be blank
+ _, rname, rparams := check.unpackRecv(recvPar.Type, true)
+ if len(rparams) > 0 {
+ // Blank identifiers don't get declared and regular type-checking of the instantiated
+ // parameterized receiver type expression fails in Checker.collectParams of receiver.
+ // Identify blank type parameters and substitute each with a unique new identifier named
+ // "n_" (where n is the parameter index) and which cannot conflict with any user-defined
+ // name.
+ var smap map[*syntax.Name]*syntax.Name // substitution map from "_" to "n_" identifiers
+ for i, p := range rparams {
+ if p.Value == "_" {
+ new := *p
+ new.Value = fmt.Sprintf("%d_", i)
+ rparams[i] = &new // use n_ identifier instead of _ so it can be looked up
+ if smap == nil {
+ smap = make(map[*syntax.Name]*syntax.Name)
+ }
+ smap[p] = &new
+ }
+ }
+ if smap != nil {
+ // blank identifiers were found => use rewritten receiver type
+ recvTyp = isubst(recvPar.Type, smap)
+ }
+ rlist := make([]*TypeParam, len(rparams))
+ for i, rparam := range rparams {
+ rlist[i] = check.declareTypeParam(rparam)
+ }
+ sig.rparams = bindTParams(rlist)
+ // determine receiver type to get its type parameters
+ // and the respective type parameter bounds
+ var recvTParams []*TypeParam
+ if rname != nil {
+ // recv should be a Named type (otherwise an error is reported elsewhere)
+ // Also: Don't report an error via genericType since it will be reported
+ // again when we type-check the signature.
+ // TODO(gri) maybe the receiver should be marked as invalid instead?
+ if recv := asNamed(check.genericType(rname, false)); recv != nil {
+ recvTParams = recv.TParams().list()
+ }
+ }
+ // provide type parameter bounds
+ // - only do this if we have the right number (otherwise an error is reported elsewhere)
+ if sig.RParams().Len() == len(recvTParams) {
+ // We have a list of *TypeParams but we need a list of Types.
+ list := make([]Type, sig.RParams().Len())
+ for i, t := range sig.RParams().list() {
+ list[i] = t
+ }
+ smap := makeSubstMap(recvTParams, list)
+ for i, tpar := range sig.RParams().list() {
+ bound := recvTParams[i].bound
+ // bound is (possibly) parameterized in the context of the
+ // receiver type declaration. Substitute parameters for the
+ // current context.
+ // TODO(gri) should we assume now that bounds always exist?
+ // (no bound == empty interface)
+ if bound != nil {
+ bound = check.subst(tpar.obj.pos, bound, smap, nil)
+ tpar.bound = bound
+ }
+ }
+ }
+ }
+ }
+
+ if tparams != nil {
+ sig.tparams = check.collectTypeParams(tparams)
+ // Always type-check method type parameters but complain if they are not enabled.
+ // (A separate check is needed when type-checking interface method signatures because
+ // they don't have a receiver specification.)
+ if recvPar != nil && !acceptMethodTypeParams {
+ check.error(ftyp, "methods cannot have type parameters")
+ }
+ }
+
+ // Value (non-type) parameters' scope starts in the function body. Use a temporary scope for their
+ // declarations and then squash that scope into the parent scope (and report any redeclarations at
+ // that time).
+ scope := NewScope(check.scope, nopos, nopos, "function body (temp. scope)")
+ var recvList []*Var // TODO(gri) remove the need for making a list here
+ if recvPar != nil {
+ recvList, _ = check.collectParams(scope, []*syntax.Field{recvPar}, recvTyp, false) // use rewritten receiver type, if any
+ }
+ params, variadic := check.collectParams(scope, ftyp.ParamList, nil, true)
+ results, _ := check.collectParams(scope, ftyp.ResultList, nil, false)
+ scope.Squash(func(obj, alt Object) {
+ var err error_
+ err.errorf(obj, "%s redeclared in this block", obj.Name())
+ err.recordAltDecl(alt)
+ check.report(&err)
+ })
+
+ if recvPar != nil {
+ // recv parameter list present (may be empty)
+ // spec: "The receiver is specified via an extra parameter section preceding the
+ // method name. That parameter section must declare a single parameter, the receiver."
+ var recv *Var
+ switch len(recvList) {
+ case 0:
+ // error reported by resolver
+ recv = NewParam(nopos, nil, "", Typ[Invalid]) // ignore recv below
+ default:
+ // more than one receiver
+ check.error(recvList[len(recvList)-1].Pos(), "method must have exactly one receiver")
+ fallthrough // continue with first receiver
+ case 1:
+ recv = recvList[0]
+ }
+
+ // TODO(gri) We should delay rtyp expansion to when we actually need the
+ // receiver; thus all checks here should be delayed to later.
+ rtyp, _ := deref(recv.typ)
+
+ // spec: "The receiver type must be of the form T or *T where T is a type name."
+ // (ignore invalid types - error was reported before)
+ if rtyp != Typ[Invalid] {
+ var err string
+ switch T := rtyp.(type) {
+ case *Named:
+ T.expand(nil)
+ // spec: "The type denoted by T is called the receiver base type; it must not
+ // be a pointer or interface type and it must be declared in the same package
+ // as the method."
+ if T.obj.pkg != check.pkg {
+ err = "type not defined in this package"
+ if check.conf.CompilerErrorMessages {
+ check.errorf(recv.pos, "cannot define new methods on non-local type %s", recv.typ)
+ err = ""
+ }
+ } else {
+ // The underlying type of a receiver base type can be a type parameter;
+ // e.g. for methods with a generic receiver T[P] with type T[P any] P.
+ underIs(T, func(u Type) bool {
+ switch u := u.(type) {
+ case *Basic:
+ // unsafe.Pointer is treated like a regular pointer
+ if u.kind == UnsafePointer {
+ err = "unsafe.Pointer"
+ return false
+ }
+ case *Pointer, *Interface:
+ err = "pointer or interface type"
+ return false
+ }
+ return true
+ })
+ }
+ case *Basic:
+ err = "basic or unnamed type"
+ if check.conf.CompilerErrorMessages {
+ check.errorf(recv.pos, "cannot define new methods on non-local type %s", recv.typ)
+ err = ""
+ }
+ default:
+ check.errorf(recv.pos, "invalid receiver type %s", recv.typ)
+ }
+ if err != "" {
+ check.errorf(recv.pos, "invalid receiver type %s (%s)", recv.typ, err)
+ // ok to continue
+ }
+ }
+ sig.recv = recv
+ }
+
+ sig.params = NewTuple(params...)
+ sig.results = NewTuple(results...)
+ sig.variadic = variadic
+}
+
+// collectParams declares the parameters of list in scope and returns the corresponding
+// variable list. If type0 != nil, it is used instead of the first type in list.
+func (check *Checker) collectParams(scope *Scope, list []*syntax.Field, type0 syntax.Expr, variadicOk bool) (params []*Var, variadic bool) {
+ if list == nil {
+ return
+ }
+
+ var named, anonymous bool
+
+ var typ Type
+ var prev syntax.Expr
+ for i, field := range list {
+ ftype := field.Type
+ // type-check type of grouped fields only once
+ if ftype != prev {
+ prev = ftype
+ if i == 0 && type0 != nil {
+ ftype = type0
+ }
+ if t, _ := ftype.(*syntax.DotsType); t != nil {
+ ftype = t.Elem
+ if variadicOk && i == len(list)-1 {
+ variadic = true
+ } else {
+ check.softErrorf(t, "can only use ... with final parameter in list")
+ // ignore ... and continue
+ }
+ }
+ typ = check.varType(ftype)
+ }
+ // The parser ensures that f.Tag is nil and we don't
+ // care if a constructed AST contains a non-nil tag.
+ if field.Name != nil {
+ // named parameter
+ name := field.Name.Value
+ if name == "" {
+ check.error(field.Name, invalidAST+"anonymous parameter")
+ // ok to continue
+ }
+ par := NewParam(field.Name.Pos(), check.pkg, name, typ)
+ check.declare(scope, field.Name, par, scope.pos)
+ params = append(params, par)
+ named = true
+ } else {
+ // anonymous parameter
+ par := NewParam(field.Pos(), check.pkg, "", typ)
+ check.recordImplicit(field, par)
+ params = append(params, par)
+ anonymous = true
+ }
+ }
+
+ if named && anonymous {
+ check.error(list[0], invalidAST+"list contains both named and anonymous parameters")
+ // ok to continue
+ }
+
+ // For a variadic function, change the last parameter's type from T to []T.
+ // Since we type-checked T rather than ...T, we also need to retro-actively
+ // record the type for ...T.
+ if variadic {
+ last := params[len(params)-1]
+ last.typ = &Slice{elem: last.typ}
+ check.recordTypeAndValue(list[len(list)-1].Type, typexpr, last.typ, nil)
+ }
+
+ return
+}
+
+// isubst returns an x with identifiers substituted per the substitution map smap.
+// isubst only handles the case of (valid) method receiver type expressions correctly.
+func isubst(x syntax.Expr, smap map[*syntax.Name]*syntax.Name) syntax.Expr {
+ switch n := x.(type) {
+ case *syntax.Name:
+ if alt := smap[n]; alt != nil {
+ return alt
+ }
+ // case *syntax.StarExpr:
+ // X := isubst(n.X, smap)
+ // if X != n.X {
+ // new := *n
+ // new.X = X
+ // return &new
+ // }
+ case *syntax.Operation:
+ if n.Op == syntax.Mul && n.Y == nil {
+ X := isubst(n.X, smap)
+ if X != n.X {
+ new := *n
+ new.X = X
+ return &new
+ }
+ }
+ case *syntax.IndexExpr:
+ Index := isubst(n.Index, smap)
+ if Index != n.Index {
+ new := *n
+ new.Index = Index
+ return &new
+ }
+ case *syntax.ListExpr:
+ var elems []syntax.Expr
+ for i, elem := range n.ElemList {
+ new := isubst(elem, smap)
+ if new != elem {
+ if elems == nil {
+ elems = make([]syntax.Expr, len(n.ElemList))
+ copy(elems, n.ElemList)
+ }
+ elems[i] = new
+ }
+ }
+ if elems != nil {
+ new := *n
+ new.ElemList = elems
+ return &new
+ }
+ case *syntax.ParenExpr:
+ return isubst(n.X, smap) // no need to keep parentheses
+ default:
+ // Other receiver type expressions are invalid.
+ // It's fine to ignore those here as they will
+ // be checked elsewhere.
+ }
+ return x
+}
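
Per the constructor at the top of the new file, a variadic signature must have at least one parameter and its last parameter must have an unnamed slice type. A short usage sketch via the exported go/types API, which mirrors types2:

    package main

    import (
        "fmt"
        "go/token"
        "go/types"
    )

    func main() {
        // Build func(prefix string, xs ...int): the final parameter is
        // declared with slice type []int and the variadic flag set.
        prefix := types.NewParam(token.NoPos, nil, "prefix", types.Typ[types.String])
        xs := types.NewParam(token.NoPos, nil, "xs", types.NewSlice(types.Typ[types.Int]))
        sig := types.NewSignature(nil, types.NewTuple(prefix, xs), nil, true)

        fmt.Println(sig)            // func(prefix string, xs ...int)
        fmt.Println(sig.Variadic()) // true
    }
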
diff --git a/src/cmd/compile/internal/types2/sizeof_test.go b/src/cmd/compile/internal/types2/sizeof_test.go
index 236feb0404..5be369d843 100644
--- a/src/cmd/compile/internal/types2/sizeof_test.go
+++ b/src/cmd/compile/internal/types2/sizeof_test.go
@@ -26,15 +26,14 @@ func TestSizeof(t *testing.T) {
{Struct{}, 24, 48},
{Pointer{}, 8, 16},
{Tuple{}, 12, 24},
- {Signature{}, 44, 88},
- {Sum{}, 12, 24},
- {Interface{}, 60, 120},
+ {Signature{}, 28, 56},
+ {Union{}, 16, 32},
+ {Interface{}, 40, 80},
{Map{}, 16, 32},
{Chan{}, 12, 24},
- {Named{}, 68, 136},
+ {Named{}, 72, 136},
{TypeParam{}, 28, 48},
- {instance{}, 52, 96},
- {bottom{}, 0, 0},
+ {term{}, 12, 24},
{top{}, 0, 0},
// Objects
@@ -48,8 +47,9 @@ func TestSizeof(t *testing.T) {
{Nil{}, 56, 88},
// Misc
- {Scope{}, 56, 96},
+ {Scope{}, 60, 104},
{Package{}, 40, 80},
+ {_TypeSet{}, 28, 56},
}
for _, test := range tests {
diff --git a/src/cmd/compile/internal/types2/sizes.go b/src/cmd/compile/internal/types2/sizes.go
index aa0fbf40fc..6a3d19d8ea 100644
--- a/src/cmd/compile/internal/types2/sizes.go
+++ b/src/cmd/compile/internal/types2/sizes.go
@@ -48,7 +48,7 @@ type StdSizes struct {
func (s *StdSizes) Alignof(T Type) int64 {
// For arrays and structs, alignment is defined in terms
// of alignment of the elements and fields, respectively.
- switch t := optype(T).(type) {
+ switch t := under(T).(type) {
case *Array:
// spec: "For a variable x of array type: unsafe.Alignof(x)
// is the same as unsafe.Alignof(x[0]), but at least 1."
@@ -73,6 +73,8 @@ func (s *StdSizes) Alignof(T Type) int64 {
if t.Info()&IsString != 0 {
return s.WordSize
}
+ case *TypeParam, *Union:
+ unreachable()
}
a := s.Sizeof(T) // may be 0
// spec: "For a variable x of any type: unsafe.Alignof(x) is at least 1."
@@ -118,7 +120,7 @@ var basicSizes = [...]byte{
}
func (s *StdSizes) Sizeof(T Type) int64 {
- switch t := optype(T).(type) {
+ switch t := under(T).(type) {
case *Basic:
assert(isTyped(T))
k := t.kind
@@ -148,10 +150,10 @@ func (s *StdSizes) Sizeof(T Type) int64 {
}
offsets := s.Offsetsof(t.fields)
return offsets[n-1] + s.Sizeof(t.fields[n-1].typ)
- case *Sum:
- panic("Sizeof unimplemented for type sum")
case *Interface:
return s.WordSize * 2
+ case *TypeParam, *Union:
+ unreachable()
}
return s.WordSize // catch-all
}
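
Sizeof and Alignof above now switch on under(T) and treat type parameters and unions as unreachable; the size arithmetic itself is unchanged. A small illustration of that arithmetic through the exported go/types.StdSizes, assuming a 64-bit target with 8-byte words:

    package main

    import (
        "fmt"
        "go/token"
        "go/types"
    )

    func main() {
        sizes := &types.StdSizes{WordSize: 8, MaxAlign: 8}

        fields := []*types.Var{
            types.NewField(token.NoPos, nil, "b", types.Typ[types.Uint8], false),
            types.NewField(token.NoPos, nil, "i", types.Typ[types.Int64], false),
        }
        s := types.NewStruct(fields, nil)

        fmt.Println(sizes.Sizeof(s))  // 16: i is aligned to offset 8, so b is followed by 7 bytes of padding
        fmt.Println(sizes.Alignof(s)) // 8: the largest field alignment wins

        fmt.Println(sizes.Sizeof(types.NewSlice(types.Typ[types.Int]))) // 24: three words (ptr, len, cap)
    }
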
diff --git a/src/cmd/compile/internal/types2/slice.go b/src/cmd/compile/internal/types2/slice.go
new file mode 100644
index 0000000000..9c22a6fb1b
--- /dev/null
+++ b/src/cmd/compile/internal/types2/slice.go
@@ -0,0 +1,19 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+// A Slice represents a slice type.
+type Slice struct {
+ elem Type
+}
+
+// NewSlice returns a new slice type for the given element type.
+func NewSlice(elem Type) *Slice { return &Slice{elem: elem} }
+
+// Elem returns the element type of slice s.
+func (s *Slice) Elem() Type { return s.elem }
+
+func (s *Slice) Underlying() Type { return s }
+func (s *Slice) String() string { return TypeString(s, nil) }
diff --git a/src/cmd/compile/internal/types2/stmt.go b/src/cmd/compile/internal/types2/stmt.go
index c3e646c80c..7865c2d4f4 100644
--- a/src/cmd/compile/internal/types2/stmt.go
+++ b/src/cmd/compile/internal/types2/stmt.go
@@ -14,7 +14,7 @@ import (
func (check *Checker) funcBody(decl *declInfo, name string, sig *Signature, body *syntax.BlockStmt, iota constant.Value) {
if check.conf.IgnoreFuncBodies {
- panic("internal error: function body not ignored")
+ panic("function body not ignored")
}
if check.conf.Trace {
@@ -64,7 +64,8 @@ func (check *Checker) funcBody(decl *declInfo, name string, sig *Signature, body
func (check *Checker) usage(scope *Scope) {
var unused []*Var
- for _, elem := range scope.elems {
+ for name, elem := range scope.elems {
+ elem = resolve(name, elem)
if v, _ := elem.(*Var); v != nil && !v.used {
unused = append(unused, v)
}
@@ -255,7 +256,7 @@ L:
// look for duplicate types for a given value
// (quadratic algorithm, but these lists tend to be very short)
for _, vt := range seen[val] {
- if check.identical(v.typ, vt.typ) {
+ if Identical(v.typ, vt.typ) {
var err error_
err.errorf(&v, "duplicate case %s in expression switch", &v)
err.errorf(vt.pos, "previous case")
@@ -281,7 +282,7 @@ L:
// look for duplicate types
// (quadratic algorithm, but type switches tend to be reasonably small)
for t, other := range seen {
- if T == nil && t == nil || T != nil && t != nil && check.identical(T, t) {
+ if T == nil && t == nil || T != nil && t != nil && Identical(T, t) {
// talk about "case" rather than "type" because of nil case
Ts := "nil"
if T != nil {
@@ -351,25 +352,33 @@ func (check *Checker) stmt(ctxt stmtContext, s syntax.Stmt) {
check.errorf(&x, "%s %s", &x, msg)
case *syntax.SendStmt:
- var ch, x operand
+ var ch, val operand
check.expr(&ch, s.Chan)
- check.expr(&x, s.Value)
- if ch.mode == invalid || x.mode == invalid {
+ check.expr(&val, s.Value)
+ if ch.mode == invalid || val.mode == invalid {
return
}
-
- tch := asChan(ch.typ)
- if tch == nil {
- check.errorf(s, invalidOp+"cannot send to non-chan type %s", ch.typ)
- return
- }
-
- if tch.dir == RecvOnly {
- check.errorf(s, invalidOp+"cannot send to receive-only type %s", tch)
+ var elem Type
+ if !underIs(ch.typ, func(u Type) bool {
+ uch, _ := u.(*Chan)
+ if uch == nil {
+ check.errorf(s, invalidOp+"cannot send to non-channel %s", &ch)
+ return false
+ }
+ if uch.dir == RecvOnly {
+ check.errorf(s, invalidOp+"cannot send to receive-only channel %s", &ch)
+ return false
+ }
+ if elem != nil && !Identical(uch.elem, elem) {
+ check.errorf(s, invalidOp+"channels of %s must have the same element type", &ch)
+ return false
+ }
+ elem = uch.elem
+ return true
+ }) {
return
}
-
- check.assignment(&x, tch.elem, "send")
+ check.assignment(&val, elem, "send")
case *syntax.AssignStmt:
lhs := unpackExpr(s.Lhs)
@@ -780,9 +789,9 @@ func (check *Checker) rangeStmt(inner stmtContext, s *syntax.ForStmt, rclause *s
// determine key/value types
var key, val Type
if x.mode != invalid {
+ // Ranging over a type parameter is permitted if it has a structural type.
typ := optype(x.typ)
if _, ok := typ.(*Chan); ok && sValue != nil {
- // TODO(gri) this also needs to happen for channels in generic variables
check.softErrorf(sValue, "range over %s permits only one iteration variable", &x)
// ok to continue
}
@@ -891,7 +900,7 @@ func isVarName(x syntax.Expr) bool {
// variables are used or present; this matters if we range over a generic
// type where not all keys or values are of the same type.
func rangeKeyVal(typ Type, wantKey, wantVal bool) (Type, Type, string) {
- switch typ := typ.(type) {
+ switch typ := arrayPtrDeref(typ).(type) {
case *Basic:
if isString(typ) {
return Typ[Int], universeRune, "" // use 'rune' name
@@ -900,10 +909,6 @@ func rangeKeyVal(typ Type, wantKey, wantVal bool) (Type, Type, string) {
return Typ[Int], typ.elem, ""
case *Slice:
return Typ[Int], typ.elem, ""
- case *Pointer:
- if typ := asArray(typ.base); typ != nil {
- return Typ[Int], typ.elem, ""
- }
case *Map:
return typ.key, typ.elem, ""
case *Chan:
@@ -912,32 +917,9 @@ func rangeKeyVal(typ Type, wantKey, wantVal bool) (Type, Type, string) {
msg = "receive from send-only channel"
}
return typ.elem, Typ[Invalid], msg
- case *Sum:
- first := true
- var key, val Type
- var msg string
- typ.is(func(t Type) bool {
- k, v, m := rangeKeyVal(under(t), wantKey, wantVal)
- if k == nil || m != "" {
- key, val, msg = k, v, m
- return false
- }
- if first {
- key, val, msg = k, v, m
- first = false
- return true
- }
- if wantKey && !Identical(key, k) {
- key, val, msg = nil, nil, "all possible values must have the same key type"
- return false
- }
- if wantVal && !Identical(val, v) {
- key, val, msg = nil, nil, "all possible values must have the same element type"
- return false
- }
- return true
- })
- return key, val, msg
+ case *top:
+ // we have a type parameter with no structural type
+ return nil, nil, "no structural type"
}
return nil, nil, ""
}
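
The send-statement and range-statement changes above accept operands whose type is a type parameter, as long as every type in its set is a sendable channel with the same element type (for sends) or the constraint has a structural type (for range). A user-level example of what that permits, assuming Go 1.18 generics syntax:

    package main

    import "fmt"

    // sum ranges over a type parameter whose constraint has a structural
    // (slice) type, the situation the rangeStmt change above is about.
    func sum[S ~[]int](s S) int {
        total := 0
        for _, v := range s { // allowed: every type in S's set has underlying type []int
            total += v
        }
        return total
    }

    // send illustrates the reworked send-statement check: every type in C's
    // type set must be a sendable channel, and all element types must agree.
    func send[C chan int | chan<- int](c C, v int) {
        c <- v
    }

    type ints []int

    func main() {
        fmt.Println(sum(ints{1, 2, 3})) // 6

        ch := make(chan int, 1)
        send(ch, 42)
        fmt.Println(<-ch) // 42
    }
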
diff --git a/src/cmd/compile/internal/types2/struct.go b/src/cmd/compile/internal/types2/struct.go
new file mode 100644
index 0000000000..f0c27c0150
--- /dev/null
+++ b/src/cmd/compile/internal/types2/struct.go
@@ -0,0 +1,213 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+import (
+ "cmd/compile/internal/syntax"
+ "strconv"
+)
+
+// ----------------------------------------------------------------------------
+// API
+
+// A Struct represents a struct type.
+type Struct struct {
+ fields []*Var
+ tags []string // field tags; nil if there are no tags
+}
+
+// NewStruct returns a new struct with the given fields and corresponding field tags.
+// If a field with index i has a tag, tags[i] must be that tag, but len(tags) may be
+// only as long as required to hold the tag with the largest index i. Consequently,
+// if no field has a tag, tags may be nil.
+func NewStruct(fields []*Var, tags []string) *Struct {
+ var fset objset
+ for _, f := range fields {
+ if f.name != "_" && fset.insert(f) != nil {
+ panic("multiple fields with the same name")
+ }
+ }
+ if len(tags) > len(fields) {
+ panic("more tags than fields")
+ }
+ return &Struct{fields: fields, tags: tags}
+}
+
+// NumFields returns the number of fields in the struct (including blank and embedded fields).
+func (s *Struct) NumFields() int { return len(s.fields) }
+
+// Field returns the i'th field for 0 <= i < NumFields().
+func (s *Struct) Field(i int) *Var { return s.fields[i] }
+
+// Tag returns the i'th field tag for 0 <= i < NumFields().
+func (s *Struct) Tag(i int) string {
+ if i < len(s.tags) {
+ return s.tags[i]
+ }
+ return ""
+}
+
+func (s *Struct) Underlying() Type { return s }
+func (s *Struct) String() string { return TypeString(s, nil) }
+
+// ----------------------------------------------------------------------------
+// Implementation
+
+func (check *Checker) structType(styp *Struct, e *syntax.StructType) {
+ if e.FieldList == nil {
+ return
+ }
+
+ // struct fields and tags
+ var fields []*Var
+ var tags []string
+
+ // for double-declaration checks
+ var fset objset
+
+ // current field typ and tag
+ var typ Type
+ var tag string
+ add := func(ident *syntax.Name, embedded bool, pos syntax.Pos) {
+ if tag != "" && tags == nil {
+ tags = make([]string, len(fields))
+ }
+ if tags != nil {
+ tags = append(tags, tag)
+ }
+
+ name := ident.Value
+ fld := NewField(pos, check.pkg, name, typ, embedded)
+ // spec: "Within a struct, non-blank field names must be unique."
+ if name == "_" || check.declareInSet(&fset, pos, fld) {
+ fields = append(fields, fld)
+ check.recordDef(ident, fld)
+ }
+ }
+
+ // addInvalid adds an embedded field of invalid type to the struct for
+ // fields with errors; this keeps the number of struct fields in sync
+ // with the source as long as the fields are _ or have different names
+ // (issue #25627).
+ addInvalid := func(ident *syntax.Name, pos syntax.Pos) {
+ typ = Typ[Invalid]
+ tag = ""
+ add(ident, true, pos)
+ }
+
+ var prev syntax.Expr
+ for i, f := range e.FieldList {
+ // Fields declared syntactically with the same type (e.g.: a, b, c T)
+ // share the same type expression. Only check type if it's a new type.
+ if i == 0 || f.Type != prev {
+ typ = check.varType(f.Type)
+ prev = f.Type
+ }
+ tag = ""
+ if i < len(e.TagList) {
+ tag = check.tag(e.TagList[i])
+ }
+ if f.Name != nil {
+ // named field
+ add(f.Name, false, f.Name.Pos())
+ } else {
+ // embedded field
+ // spec: "An embedded type must be specified as a type name T or as a
+ // pointer to a non-interface type name *T, and T itself may not be a
+ // pointer type."
+ pos := syntax.StartPos(f.Type)
+ name := embeddedFieldIdent(f.Type)
+ if name == nil {
+ check.errorf(pos, "invalid embedded field type %s", f.Type)
+ name = &syntax.Name{Value: "_"} // TODO(gri) need to set position to pos
+ addInvalid(name, pos)
+ continue
+ }
+ add(name, true, pos)
+
+ // Because we have a name, typ must be of the form T or *T, where T is the name
+ // of a (named or alias) type, and t (= deref(typ)) must be the type of T.
+ // We must delay this check to the end because we don't want to instantiate
+ // (via under(t)) a possibly incomplete type.
+ embeddedTyp := typ // for closure below
+ embeddedPos := pos
+ check.later(func() {
+ t, isPtr := deref(embeddedTyp)
+ switch t := under(t).(type) {
+ case *Basic:
+ if t == Typ[Invalid] {
+ // error was reported before
+ return
+ }
+ // unsafe.Pointer is treated like a regular pointer
+ if t.kind == UnsafePointer {
+ check.error(embeddedPos, "embedded field type cannot be unsafe.Pointer")
+ }
+ case *Pointer:
+ check.error(embeddedPos, "embedded field type cannot be a pointer")
+ case *TypeParam:
+ check.error(embeddedPos, "embedded field type cannot be a (pointer to a) type parameter")
+ case *Interface:
+ if isPtr {
+ check.error(embeddedPos, "embedded field type cannot be a pointer to an interface")
+ }
+ }
+ })
+ }
+ }
+
+ styp.fields = fields
+ styp.tags = tags
+}
+
+func embeddedFieldIdent(e syntax.Expr) *syntax.Name {
+ switch e := e.(type) {
+ case *syntax.Name:
+ return e
+ case *syntax.Operation:
+ if base := ptrBase(e); base != nil {
+ // *T is valid, but **T is not
+ if op, _ := base.(*syntax.Operation); op == nil || ptrBase(op) == nil {
+ return embeddedFieldIdent(e.X)
+ }
+ }
+ case *syntax.SelectorExpr:
+ return e.Sel
+ case *syntax.IndexExpr:
+ return embeddedFieldIdent(e.X)
+ }
+ return nil // invalid embedded field
+}
+
+func (check *Checker) declareInSet(oset *objset, pos syntax.Pos, obj Object) bool {
+ if alt := oset.insert(obj); alt != nil {
+ var err error_
+ err.errorf(pos, "%s redeclared", obj.Name())
+ err.recordAltDecl(alt)
+ check.report(&err)
+ return false
+ }
+ return true
+}
+
+func (check *Checker) tag(t *syntax.BasicLit) string {
+ // If t.Bad, an error was reported during parsing.
+ if t != nil && !t.Bad {
+ if t.Kind == syntax.StringLit {
+ if val, err := strconv.Unquote(t.Value); err == nil {
+ return val
+ }
+ }
+ check.errorf(t, invalidAST+"incorrect tag syntax: %q", t.Value)
+ }
+ return ""
+}
+
+func ptrBase(x *syntax.Operation) syntax.Expr {
+ if x.Op == syntax.Mul && x.Y == nil {
+ return x.X
+ }
+ return nil
+}
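
NewStruct at the top of this file requires unique non-blank field names and allows the tag slice to be shorter than the field list. A brief usage sketch through the exported go/types API, which mirrors types2:

    package main

    import (
        "fmt"
        "go/token"
        "go/types"
    )

    func main() {
        // tags[i] is the tag of field i; the slice may be shorter than the
        // field list (or nil) when trailing fields carry no tag.
        name := types.NewField(token.NoPos, nil, "Name", types.Typ[types.String], false)
        age := types.NewField(token.NoPos, nil, "Age", types.Typ[types.Int], false)
        s := types.NewStruct([]*types.Var{name, age}, []string{`json:"name"`})

        fmt.Println(s.NumFields())   // 2
        fmt.Println(s.Tag(0))        // json:"name"
        fmt.Printf("%q\n", s.Tag(1)) // "" (no tag recorded for Age)
    }
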
diff --git a/src/cmd/compile/internal/types2/subst.go b/src/cmd/compile/internal/types2/subst.go
index c8e428c183..918e5f3043 100644
--- a/src/cmd/compile/internal/types2/subst.go
+++ b/src/cmd/compile/internal/types2/subst.go
@@ -2,215 +2,46 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// This file implements instantiation of generic types
-// through substitution of type parameters by actual
-// types.
+// This file implements type parameter substitution.
package types2
import (
"bytes"
"cmd/compile/internal/syntax"
- "fmt"
)
-type substMap struct {
- // The targs field is currently needed for *Named type substitution.
- // TODO(gri) rewrite that code, get rid of this field, and make this
- // struct just the map (proj)
- targs []Type
- proj map[*TypeParam]Type
-}
+type substMap map[*TypeParam]Type
// makeSubstMap creates a new substitution map mapping tpars[i] to targs[i].
// If targs[i] is nil, tpars[i] is not substituted.
-func makeSubstMap(tpars []*TypeName, targs []Type) *substMap {
+func makeSubstMap(tpars []*TypeParam, targs []Type) substMap {
assert(len(tpars) == len(targs))
- proj := make(map[*TypeParam]Type, len(tpars))
+ proj := make(substMap, len(tpars))
for i, tpar := range tpars {
- // We must expand type arguments otherwise *instance
- // types end up as components in composite types.
- // TODO(gri) explain why this causes problems, if it does
- targ := expand(targs[i]) // possibly nil
- targs[i] = targ
- proj[tpar.typ.(*TypeParam)] = targ
+ proj[tpar] = targs[i]
}
- return &substMap{targs, proj}
-}
-
-func (m *substMap) String() string {
- return fmt.Sprintf("%s", m.proj)
+ return proj
}
-func (m *substMap) empty() bool {
- return len(m.proj) == 0
+func (m substMap) empty() bool {
+ return len(m) == 0
}
-func (m *substMap) lookup(tpar *TypeParam) Type {
- if t := m.proj[tpar]; t != nil {
+func (m substMap) lookup(tpar *TypeParam) Type {
+ if t := m[tpar]; t != nil {
return t
}
return tpar
}
-func (check *Checker) instantiate(pos syntax.Pos, typ Type, targs []Type, poslist []syntax.Pos) (res Type) {
- if check.conf.Trace {
- check.trace(pos, "-- instantiating %s with %s", typ, typeListString(targs))
- check.indent++
- defer func() {
- check.indent--
- var under Type
- if res != nil {
- // Calling under() here may lead to endless instantiations.
- // Test case: type T[P any] T[P]
- // TODO(gri) investigate if that's a bug or to be expected.
- under = res.Underlying()
- }
- check.trace(pos, "=> %s (under = %s)", res, under)
- }()
- }
-
- assert(len(poslist) <= len(targs))
-
- // TODO(gri) What is better here: work with TypeParams, or work with TypeNames?
- var tparams []*TypeName
- switch t := typ.(type) {
- case *Named:
- tparams = t.tparams
- case *Signature:
- tparams = t.tparams
- defer func() {
- // If we had an unexpected failure somewhere don't panic below when
- // asserting res.(*Signature). Check for *Signature in case Typ[Invalid]
- // is returned.
- if _, ok := res.(*Signature); !ok {
- return
- }
- // If the signature doesn't use its type parameters, subst
- // will not make a copy. In that case, make a copy now (so
- // we can set tparams to nil w/o causing side-effects).
- if t == res {
- copy := *t
- res = &copy
- }
- // After instantiating a generic signature, it is not generic
- // anymore; we need to set tparams to nil.
- res.(*Signature).tparams = nil
- }()
-
- default:
- check.dump("%v: cannot instantiate %v", pos, typ)
- unreachable() // only defined types and (defined) functions can be generic
-
- }
-
- // the number of supplied types must match the number of type parameters
- if len(targs) != len(tparams) {
- // TODO(gri) provide better error message
- check.errorf(pos, "got %d arguments but %d type parameters", len(targs), len(tparams))
- return Typ[Invalid]
- }
-
- if len(tparams) == 0 {
- return typ // nothing to do (minor optimization)
- }
-
- smap := makeSubstMap(tparams, targs)
-
- // check bounds
- for i, tname := range tparams {
- tpar := tname.typ.(*TypeParam)
- iface := tpar.Bound()
- if iface.Empty() {
- continue // no type bound
- }
-
- targ := targs[i]
-
- // best position for error reporting
- pos := pos
- if i < len(poslist) {
- pos = poslist[i]
- }
-
- // The type parameter bound is parameterized with the same type parameters
- // as the instantiated type; before we can use it for bounds checking we
- // need to instantiate it with the type arguments with which we instantiate
- // the parameterized type.
- iface = check.subst(pos, iface, smap).(*Interface)
-
- // targ must implement iface (methods)
- // - check only if we have methods
- check.completeInterface(nopos, iface)
- if len(iface.allMethods) > 0 {
- // If the type argument is a pointer to a type parameter, the type argument's
- // method set is empty.
- // TODO(gri) is this what we want? (spec question)
- if base, isPtr := deref(targ); isPtr && asTypeParam(base) != nil {
- check.errorf(pos, "%s has no methods", targ)
- break
- }
- if m, wrong := check.missingMethod(targ, iface, true); m != nil {
- // TODO(gri) needs to print updated name to avoid major confusion in error message!
- // (print warning for now)
- // Old warning:
- // check.softErrorf(pos, "%s does not satisfy %s (warning: name not updated) = %s (missing method %s)", targ, tpar.bound, iface, m)
- if m.name == "==" {
- // We don't want to report "missing method ==".
- check.softErrorf(pos, "%s does not satisfy comparable", targ)
- } else if wrong != nil {
- // TODO(gri) This can still report uninstantiated types which makes the error message
- // more difficult to read then necessary.
- check.softErrorf(pos,
- "%s does not satisfy %s: wrong method signature\n\tgot %s\n\twant %s",
- targ, tpar.bound, wrong, m,
- )
- } else {
- check.softErrorf(pos, "%s does not satisfy %s (missing method %s)", targ, tpar.bound, m.name)
- }
- break
- }
- }
-
- // targ's underlying type must also be one of the interface types listed, if any
- if iface.allTypes == nil {
- continue // nothing to do
- }
-
- // If targ is itself a type parameter, each of its possible types, but at least one, must be in the
- // list of iface types (i.e., the targ type list must be a non-empty subset of the iface types).
- if targ := asTypeParam(targ); targ != nil {
- targBound := targ.Bound()
- if targBound.allTypes == nil {
- check.softErrorf(pos, "%s does not satisfy %s (%s has no type constraints)", targ, tpar.bound, targ)
- break
- }
- for _, t := range unpack(targBound.allTypes) {
- if !iface.isSatisfiedBy(t) {
- // TODO(gri) match this error message with the one below (or vice versa)
- check.softErrorf(pos, "%s does not satisfy %s (%s type constraint %s not found in %s)", targ, tpar.bound, targ, t, iface.allTypes)
- break
- }
- }
- break
- }
-
- // Otherwise, targ's type or underlying type must also be one of the interface types listed, if any.
- if !iface.isSatisfiedBy(targ) {
- check.softErrorf(pos, "%s does not satisfy %s (%s not found in %s)", targ, tpar.bound, under(targ), iface.allTypes)
- break
- }
- }
-
- return check.subst(pos, typ, smap)
-}
-
-// subst returns the type typ with its type parameters tpars replaced by
-// the corresponding type arguments targs, recursively.
-// subst is functional in the sense that it doesn't modify the incoming
-// type. If a substitution took place, the result type is different from
-// from the incoming type.
-func (check *Checker) subst(pos syntax.Pos, typ Type, smap *substMap) Type {
+// subst returns the type typ with its type parameters tpars replaced by the
+// corresponding type arguments targs, recursively. subst doesn't modify the
+// incoming type. If a substitution took place, the result type is different
+// from the incoming type.
+//
+// If the given typMap is non-nil, it is used in lieu of check.typMap.
+func (check *Checker) subst(pos syntax.Pos, typ Type, smap substMap, typMap map[string]*Named) Type {
if smap.empty() {
return typ
}
@@ -224,15 +55,33 @@ func (check *Checker) subst(pos syntax.Pos, typ Type, smap *substMap) Type {
}
// general case
- subst := subster{check, pos, make(map[Type]Type), smap}
+ var subst subster
+ subst.pos = pos
+ subst.smap = smap
+
+ if check != nil {
+ subst.check = check
+ if typMap == nil {
+ typMap = check.typMap
+ }
+ }
+ if typMap == nil {
+ // If we don't have a *Checker and its global type map,
+ // use a local version. Besides avoiding duplicate work,
+ // the type map prevents infinite recursive substitution
+ // for recursive types (example: type T[P any] *T[P]).
+ typMap = make(map[string]*Named)
+ }
+ subst.typMap = typMap
+
return subst.typ(typ)
}
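
The local-map fallback above exists so that checker-less calls still get the cycle protection described in the comment. The following stand-alone sketch (illustrative names only: memo, named, instantiate are not part of types2) shows the register-before-building pattern that typMap relies on: an entry is added to the map before its underlying type is substituted, so a self-reference such as type T[P any] *T[P] finds the partially built result instead of recursing forever.

package main

import "fmt"

// named stands in for an instantiated named type (*Named in types2).
type named struct {
	name       string
	underlying *named
}

// memo plays the role of subst.typMap, keyed by an instantiation hash.
var memo = map[string]*named{}

func instantiate(key string) *named {
	if n, ok := memo[key]; ok {
		return n // already built (or being built): ties off the cycle
	}
	n := &named{name: key}
	memo[key] = n                   // register before substituting the underlying type
	n.underlying = instantiate(key) // a recursive reference hits the memo entry above
	return n
}

func main() {
	t := instantiate("T[int]")
	fmt.Println(t.underlying == t) // true: the cycle is tied off, not expanded forever
}
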
type subster struct {
- check *Checker
- pos syntax.Pos
- cache map[Type]Type
- smap *substMap
+ pos syntax.Pos
+ smap substMap
+ check *Checker // nil if called via Instantiate
+ typMap map[string]*Named
}
func (subst *subster) typ(typ Type) Type {
@@ -241,7 +90,7 @@ func (subst *subster) typ(typ Type) Type {
// Call typOrNil if it's possible that typ is nil.
panic("nil typ")
- case *Basic, *bottom, *top:
+ case *Basic, *top:
// nothing to do
case *Array:
@@ -290,29 +139,20 @@ func (subst *subster) typ(typ Type) Type {
}
}
- case *Sum:
- types, copied := subst.typeList(t.types)
+ case *Union:
+ terms, copied := subst.termlist(t.terms)
if copied {
- // Don't do it manually, with a Sum literal: the new
- // types list may not be unique and NewSum may remove
- // duplicates.
- return NewSum(types)
+ // term list substitution may introduce duplicate terms (unlikely but possible).
+ // This is ok; lazy type set computation will determine the actual type set
+ // in normal form.
+ return &Union{terms, nil}
}
case *Interface:
methods, mcopied := subst.funcList(t.methods)
- types := t.types
- if t.types != nil {
- types = subst.typ(t.types)
- }
embeddeds, ecopied := subst.typeList(t.embeddeds)
- if mcopied || types != t.types || ecopied {
- iface := &Interface{methods: methods, types: types, embeddeds: embeddeds}
- if subst.check == nil {
- panic("internal error: cannot instantiate interfaces yet")
- }
- subst.check.posMap[iface] = subst.check.posMap[t] // satisfy completeInterface requirement
- subst.check.completeInterface(nopos, iface)
+ if mcopied || ecopied {
+ iface := &Interface{methods: methods, embeddeds: embeddeds, complete: t.complete}
return iface
}
@@ -342,77 +182,70 @@ func (subst *subster) typ(typ Type) Type {
}
}
- if t.tparams == nil {
+ if t.TParams().Len() == 0 {
dump(">>> %s is not parameterized", t)
return t // type is not parameterized
}
- var new_targs []Type
-
- if len(t.targs) > 0 {
- // already instantiated
- dump(">>> %s already instantiated", t)
- assert(len(t.targs) == len(t.tparams))
- // For each (existing) type argument targ, determine if it needs
- // to be substituted; i.e., if it is or contains a type parameter
- // that has a type argument for it.
- for i, targ := range t.targs {
- dump(">>> %d targ = %s", i, targ)
- new_targ := subst.typ(targ)
- if new_targ != targ {
- dump(">>> substituted %d targ %s => %s", i, targ, new_targ)
- if new_targs == nil {
- new_targs = make([]Type, len(t.tparams))
- copy(new_targs, t.targs)
- }
- new_targs[i] = new_targ
+ var newTArgs []Type
+ assert(t.targs.Len() == t.TParams().Len())
+
+ // already instantiated
+ dump(">>> %s already instantiated", t)
+ // For each (existing) type argument targ, determine if it needs
+ // to be substituted; i.e., if it is or contains a type parameter
+ // that has a type argument for it.
+ for i, targ := range t.targs.list() {
+ dump(">>> %d targ = %s", i, targ)
+ new_targ := subst.typ(targ)
+ if new_targ != targ {
+ dump(">>> substituted %d targ %s => %s", i, targ, new_targ)
+ if newTArgs == nil {
+ newTArgs = make([]Type, t.TParams().Len())
+ copy(newTArgs, t.targs.list())
}
+ newTArgs[i] = new_targ
}
+ }
- if new_targs == nil {
- dump(">>> nothing to substitute in %s", t)
- return t // nothing to substitute
- }
- } else {
- // not yet instantiated
- dump(">>> first instantiation of %s", t)
- new_targs = subst.smap.targs
+ if newTArgs == nil {
+ dump(">>> nothing to substitute in %s", t)
+ return t // nothing to substitute
}
// before creating a new named type, check if we have this one already
- h := instantiatedHash(t, new_targs)
+ h := instantiatedHash(t, newTArgs)
dump(">>> new type hash: %s", h)
- if subst.check != nil {
- if named, found := subst.check.typMap[h]; found {
- dump(">>> found %s", named)
- subst.cache[t] = named
- return named
- }
+ if named, found := subst.typMap[h]; found {
+ dump(">>> found %s", named)
+ return named
}
- // create a new named type and populate caches to avoid endless recursion
+ // Create a new named type and populate typMap to avoid endless recursion.
+ // The position used here is irrelevant because validation only occurs on t
+ // (we don't call validType on named), but we use subst.pos to help with
+ // debugging.
tname := NewTypeName(subst.pos, t.obj.pkg, t.obj.name, nil)
- named := subst.check.newNamed(tname, t, t.underlying, t.tparams, t.methods) // method signatures are updated lazily
- named.targs = new_targs
- if subst.check != nil {
- subst.check.typMap[h] = named
- }
- subst.cache[t] = named
+ t.load()
+ // It's ok to provide a nil *Checker because the newly created type
+ // doesn't need to be (lazily) expanded; it's expanded below.
+ named := (*Checker)(nil).newNamed(tname, t.orig, nil, t.tparams, t.methods) // t is loaded, so tparams and methods are available
+ named.targs = NewTypeList(newTArgs)
+ subst.typMap[h] = named
+ t.expand(subst.typMap) // must happen after typMap update to avoid infinite recursion
// do the substitution
- dump(">>> subst %s with %s (new: %s)", t.underlying, subst.smap, new_targs)
+ dump(">>> subst %s with %s (new: %s)", t.underlying, subst.smap, newTArgs)
named.underlying = subst.typOrNil(t.underlying)
- named.fromRHS = named.underlying // for cycle detection (Checker.validType)
+ dump(">>> underlying: %v", named.underlying)
+ assert(named.underlying != nil)
+ named.fromRHS = named.underlying // for consistency, though no cycle detection is necessary
return named
case *TypeParam:
return subst.smap.lookup(t)
- case *instance:
- // TODO(gri) can we avoid the expansion here and just substitute the type parameters?
- return subst.typ(t.expand())
-
default:
unimplemented()
}
@@ -420,14 +253,17 @@ func (subst *subster) typ(typ Type) Type {
return typ
}
-// TODO(gri) Eventually, this should be more sophisticated.
-// It won't work correctly for locally declared types.
+var instanceHashing = 0
+
func instantiatedHash(typ *Named, targs []Type) string {
+ assert(instanceHashing == 0)
+ instanceHashing++
var buf bytes.Buffer
writeTypeName(&buf, typ.obj, nil)
buf.WriteByte('[')
writeTypeList(&buf, targs, nil, nil)
buf.WriteByte(']')
+ instanceHashing--
// With respect to the represented type, whether a
// type is fully expanded or stored as instance
@@ -545,3 +381,21 @@ func (subst *subster) typeList(in []Type) (out []Type, copied bool) {
}
return
}
+
+func (subst *subster) termlist(in []*Term) (out []*Term, copied bool) {
+ out = in
+ for i, t := range in {
+ if u := subst.typ(t.typ); u != t.typ {
+ if !copied {
+ // first term that got substituted => allocate new out slice
+ // and copy all terms
+ new := make([]*Term, len(in))
+ copy(new, out)
+ out = new
+ copied = true
+ }
+ out[i] = NewTerm(t.tilde, u)
+ }
+ }
+ return
+}
diff --git a/src/cmd/compile/internal/types2/termlist.go b/src/cmd/compile/internal/types2/termlist.go
new file mode 100644
index 0000000000..378ba6b8f4
--- /dev/null
+++ b/src/cmd/compile/internal/types2/termlist.go
@@ -0,0 +1,167 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+import "bytes"
+
+// A termlist represents the type set represented by the union
+// t1 ∪ t2 ∪ ... tn of the type sets of the terms t1 to tn.
+// A termlist is in normal form if all terms are disjoint.
+// termlist operations don't require the operands to be in
+// normal form.
+type termlist []*term
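(For example, ~int ∪ myInt, where myInt is a defined type whose underlying type is int, is a valid termlist but is not in normal form, since its second term is contained in its first; its normal form is just ~int, as exercised by TestTermlistNorm in termlist_test.go below.)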
+
+// allTermlist represents the set of all types.
+// It is in normal form.
+var allTermlist = termlist{new(term)}
+
+// String prints the termlist exactly (without normalization).
+func (xl termlist) String() string {
+ if len(xl) == 0 {
+ return "∅"
+ }
+ var buf bytes.Buffer
+ for i, x := range xl {
+ if i > 0 {
+ buf.WriteString(" ∪ ")
+ }
+ buf.WriteString(x.String())
+ }
+ return buf.String()
+}
+
+// isEmpty reports whether the termlist xl represents the empty set of types.
+func (xl termlist) isEmpty() bool {
+ // If there's a non-nil term, the entire list is not empty.
+ // If the termlist is in normal form, this requires at most
+ // one iteration.
+ for _, x := range xl {
+ if x != nil {
+ return false
+ }
+ }
+ return true
+}
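(For example, per the cases in termlist_test.go below, ∅ ∪ ∅ is empty while ∅ ∪ ∅ ∪ myInt is not.)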
+
+// isAll reports whether the termlist xl represents the set of all types.
+func (xl termlist) isAll() bool {
+ // If there's a 𝓤 term, the entire list is 𝓤.
+ // If the termlist is in normal form, this requires at most
+ // one iteration.
+ for _, x := range xl {
+ if x != nil && x.typ == nil {
+ return true
+ }
+ }
+ return false
+}
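(For example, myInt ∪ 𝓤 represents the set of all types even though it is not in normal form; see TestTermlistIsAll below.)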
+
+// norm returns the normal form of xl.
+func (xl termlist) norm() termlist {
+ // Quadratic algorithm, but good enough for now.
+ // TODO(gri) fix asymptotic performance
+ used := make([]bool, len(xl))
+ var rl termlist
+ for i, xi := range xl {
+ if xi == nil || used[i] {
+ continue
+ }
+ for j := i + 1; j < len(xl); j++ {
+ xj := xl[j]
+ if xj == nil || used[j] {
+ continue
+ }
+ if u1, u2 := xi.union(xj); u2 == nil {
+ // If we encounter a 𝓤 term, the entire list is 𝓤.
+ // Exit early.
+ // (Note that this is not just an optimization;
+ // if we continue, we may end up with a 𝓤 term
+ // and other terms and the result would not be
+ // in normal form.)
+ if u1.typ == nil {
+ return allTermlist
+ }
+ xi = u1
+ used[j] = true // xj is now unioned into xi - ignore it in future iterations
+ }
+ }
+ rl = append(rl, xi)
+ }
+ return rl
+}
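(Worked example from the test cases below: norm(~int ∪ string ∪ myInt ∪ ~string ∪ int) folds myInt and int into ~int and string into ~string, yielding ~int ∪ ~string; if a 𝓤 term arises while merging, the whole list collapses to allTermlist.)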
+
+// If the type set represented by xl is specified by a single (non-𝓤) term,
+// structuralType returns that type. Otherwise it returns nil.
+func (xl termlist) structuralType() Type {
+ if nl := xl.norm(); len(nl) == 1 {
+ return nl[0].typ // if nl.isAll() then typ is nil, which is ok
+ }
+ return nil
+}
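(For example, ~int and ~int ∪ myInt both have structural type int, while ~int ∪ string normalizes to two terms and therefore has none; see TestTermlistStructuralType below.)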
+
+// union returns the union xl ∪ yl.
+func (xl termlist) union(yl termlist) termlist {
+ return append(xl, yl...).norm()
+}
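(For example, (int ∪ string).union(~string) is int ∪ ~string, and (~int ∪ string).union(~string ∪ int) normalizes to ~int ∪ ~string; see TestTermlistUnion below.)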
+
+// intersect returns the intersection xl ∩ yl.
+func (xl termlist) intersect(yl termlist) termlist {
+ if xl.isEmpty() || yl.isEmpty() {
+ return nil
+ }
+
+ // Quadratic algorithm, but good enough for now.
+ // TODO(gri) fix asymptotic performance
+ var rl termlist
+ for _, x := range xl {
+ for _, y := range yl {
+ if r := x.intersect(y); r != nil {
+ rl = append(rl, r)
+ }
+ }
+ }
+ return rl.norm()
+}
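(For example, (int ∪ string).intersect(~string) is string, and (~int ∪ string).intersect(~string ∪ int) is int ∪ string; see TestTermlistIntersect below.)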
+
+// equal reports whether xl and yl represent the same type set.
+func (xl termlist) equal(yl termlist) bool {
+ // TODO(gri) this should be more efficient
+ return xl.subsetOf(yl) && yl.subsetOf(xl)
+}
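(For example, int ∪ ~string ∪ ∅ and string ∪ int ∪ ~string represent the same type set, while int ∪ ~string and string ∪ int do not; see TestTermlistEqual below.)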
+
+// includes reports whether t ∈ xl.
+func (xl termlist) includes(t Type) bool {
+ for _, x := range xl {
+ if x.includes(t) {
+ return true
+ }
+ }
+ return false
+}
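(For example, ~int ∪ string includes the defined type myInt because myInt's underlying type is int; see TestTermlistIncludes below.)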
+
+// supersetOf reports whether y ⊆ xl.
+func (xl termlist) supersetOf(y *term) bool {
+ for _, x := range xl {
+ if y.subsetOf(x) {
+ return true
+ }
+ }
+ return false
+}
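(For example, ~int is a superset of the term myInt, but int is not a superset of the term ~int; see TestTermlistSupersetOf below.)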
+
+// subsetOf reports whether xl ⊆ yl.
+func (xl termlist) subsetOf(yl termlist) bool {
+ if yl.isEmpty() {
+ return xl.isEmpty()
+ }
+
+ // each term x of xl must be a subset of yl
+ for _, x := range xl {
+ if !yl.supersetOf(x) {
+ return false // x is not a subset of yl
+ }
+ }
+ return true
+}
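
(For example, myInt ∪ ~string ⊆ string ∪ int ∪ 𝓤 because the 𝓤 term on the right covers everything, whereas int ∪ ~string ⊄ string ∪ int; see TestTermlistSubsetOf below.)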
diff --git a/src/cmd/compile/internal/types2/termlist_test.go b/src/cmd/compile/internal/types2/termlist_test.go
new file mode 100644
index 0000000000..ed1330d26f
--- /dev/null
+++ b/src/cmd/compile/internal/types2/termlist_test.go
@@ -0,0 +1,313 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+import (
+ "strings"
+ "testing"
+)
+
+// maketl makes a termlist from its string representation.
+func maketl(s string) termlist {
+ s = strings.ReplaceAll(s, " ", "")
+ names := strings.Split(s, "∪")
+ r := make(termlist, len(names))
+ for i, n := range names {
+ r[i] = testTerm(n)
+ }
+ return r
+}
+
+func TestTermlistAll(t *testing.T) {
+ if !allTermlist.isAll() {
+ t.Errorf("allTermlist is not the set of all types")
+ }
+}
+
+func TestTermlistString(t *testing.T) {
+ for _, want := range []string{
+ "∅",
+ "𝓤",
+ "int",
+ "~int",
+ "myInt",
+ "∅ ∪ ∅",
+ "𝓤 ∪ 𝓤",
+ "∅ ∪ 𝓤 ∪ int",
+ "∅ ∪ 𝓤 ∪ int ∪ myInt",
+ } {
+ if got := maketl(want).String(); got != want {
+ t.Errorf("(%v).String() == %v", want, got)
+ }
+ }
+}
+
+func TestTermlistIsEmpty(t *testing.T) {
+ for test, want := range map[string]bool{
+ "∅": true,
+ "∅ ∪ ∅": true,
+ "∅ ∪ ∅ ∪ 𝓤": false,
+ "∅ ∪ ∅ ∪ myInt": false,
+ "𝓤": false,
+ "𝓤 ∪ int": false,
+ "𝓤 ∪ myInt ∪ ∅": false,
+ } {
+ xl := maketl(test)
+ got := xl.isEmpty()
+ if got != want {
+ t.Errorf("(%v).isEmpty() == %v; want %v", test, got, want)
+ }
+ }
+}
+
+func TestTermlistIsAll(t *testing.T) {
+ for test, want := range map[string]bool{
+ "∅": false,
+ "∅ ∪ ∅": false,
+ "int ∪ ~string": false,
+ "~int ∪ myInt": false,
+ "∅ ∪ ∅ ∪ 𝓤": true,
+ "𝓤": true,
+ "𝓤 ∪ int": true,
+ "myInt ∪ 𝓤": true,
+ } {
+ xl := maketl(test)
+ got := xl.isAll()
+ if got != want {
+ t.Errorf("(%v).isAll() == %v; want %v", test, got, want)
+ }
+ }
+}
+
+func TestTermlistNorm(t *testing.T) {
+ for _, test := range []struct {
+ xl, want string
+ }{
+ {"∅", "∅"},
+ {"∅ ∪ ∅", "∅"},
+ {"∅ ∪ int", "int"},
+ {"∅ ∪ myInt", "myInt"},
+ {"𝓤 ∪ int", "𝓤"},
+ {"𝓤 ∪ myInt", "𝓤"},
+ {"int ∪ myInt", "int ∪ myInt"},
+ {"~int ∪ int", "~int"},
+ {"~int ∪ myInt", "~int"},
+ {"int ∪ ~string ∪ int", "int ∪ ~string"},
+ {"~int ∪ string ∪ 𝓤 ∪ ~string ∪ int", "𝓤"},
+ {"~int ∪ string ∪ myInt ∪ ~string ∪ int", "~int ∪ ~string"},
+ } {
+ xl := maketl(test.xl)
+ got := maketl(test.xl).norm()
+ if got.String() != test.want {
+ t.Errorf("(%v).norm() = %v; want %v", xl, got, test.want)
+ }
+ }
+}
+
+func TestTermlistStructuralType(t *testing.T) {
+ // helper to deal with nil types
+ tstring := func(typ Type) string {
+ if typ == nil {
+ return "nil"
+ }
+ return typ.String()
+ }
+
+ for test, want := range map[string]string{
+ "∅": "nil",
+ "𝓤": "nil",
+ "int": "int",
+ "myInt": "myInt",
+ "~int": "int",
+ "~int ∪ string": "nil",
+ "~int ∪ myInt": "int",
+ "∅ ∪ int": "int",
+ "∅ ∪ ~int": "int",
+ "∅ ∪ ~int ∪ string": "nil",
+ } {
+ xl := maketl(test)
+ got := tstring(xl.structuralType())
+ if got != want {
+ t.Errorf("(%v).structuralType() == %v; want %v", test, got, want)
+ }
+ }
+}
+
+func TestTermlistUnion(t *testing.T) {
+ for _, test := range []struct {
+ xl, yl, want string
+ }{
+
+ {"∅", "∅", "∅"},
+ {"∅", "𝓤", "𝓤"},
+ {"∅", "int", "int"},
+ {"𝓤", "~int", "𝓤"},
+ {"int", "~int", "~int"},
+ {"int", "string", "int ∪ string"},
+ {"int", "myInt", "int ∪ myInt"},
+ {"~int", "myInt", "~int"},
+ {"int ∪ string", "~string", "int ∪ ~string"},
+ {"~int ∪ string", "~string ∪ int", "~int ∪ ~string"},
+ {"~int ∪ string ∪ ∅", "~string ∪ int", "~int ∪ ~string"},
+ {"~int ∪ myInt ∪ ∅", "~string ∪ int", "~int ∪ ~string"},
+ {"~int ∪ string ∪ 𝓤", "~string ∪ int", "𝓤"},
+ {"~int ∪ string ∪ myInt", "~string ∪ int", "~int ∪ ~string"},
+ } {
+ xl := maketl(test.xl)
+ yl := maketl(test.yl)
+ got := xl.union(yl).String()
+ if got != test.want {
+ t.Errorf("(%v).union(%v) = %v; want %v", test.xl, test.yl, got, test.want)
+ }
+ }
+}
+
+func TestTermlistIntersect(t *testing.T) {
+ for _, test := range []struct {
+ xl, yl, want string
+ }{
+
+ {"∅", "∅", "∅"},
+ {"∅", "𝓤", "∅"},
+ {"∅", "int", "∅"},
+ {"∅", "myInt", "∅"},
+ {"𝓤", "~int", "~int"},
+ {"𝓤", "myInt", "myInt"},
+ {"int", "~int", "int"},
+ {"int", "string", "∅"},
+ {"int", "myInt", "∅"},
+ {"~int", "myInt", "myInt"},
+ {"int ∪ string", "~string", "string"},
+ {"~int ∪ string", "~string ∪ int", "int ∪ string"},
+ {"~int ∪ string ∪ ∅", "~string ∪ int", "int ∪ string"},
+ {"~int ∪ myInt ∪ ∅", "~string ∪ int", "int"},
+ {"~int ∪ string ∪ 𝓤", "~string ∪ int", "int ∪ ~string"},
+ {"~int ∪ string ∪ myInt", "~string ∪ int", "int ∪ string"},
+ } {
+ xl := maketl(test.xl)
+ yl := maketl(test.yl)
+ got := xl.intersect(yl).String()
+ if got != test.want {
+ t.Errorf("(%v).intersect(%v) = %v; want %v", test.xl, test.yl, got, test.want)
+ }
+ }
+}
+
+func TestTermlistEqual(t *testing.T) {
+ for _, test := range []struct {
+ xl, yl string
+ want bool
+ }{
+ {"∅", "∅", true},
+ {"∅", "𝓤", false},
+ {"𝓤", "𝓤", true},
+ {"𝓤 ∪ int", "𝓤", true},
+ {"𝓤 ∪ int", "string ∪ 𝓤", true},
+ {"𝓤 ∪ myInt", "string ∪ 𝓤", true},
+ {"int ∪ ~string", "string ∪ int", false},
+ {"~int ∪ string", "string ∪ myInt", false},
+ {"int ∪ ~string ∪ ∅", "string ∪ int ∪ ~string", true},
+ } {
+ xl := maketl(test.xl)
+ yl := maketl(test.yl)
+ got := xl.equal(yl)
+ if got != test.want {
+ t.Errorf("(%v).equal(%v) = %v; want %v", test.xl, test.yl, got, test.want)
+ }
+ }
+}
+
+func TestTermlistIncludes(t *testing.T) {
+ for _, test := range []struct {
+ xl, typ string
+ want bool
+ }{
+ {"∅", "int", false},
+ {"𝓤", "int", true},
+ {"~int", "int", true},
+ {"int", "string", false},
+ {"~int", "string", false},
+ {"~int", "myInt", true},
+ {"int ∪ string", "string", true},
+ {"~int ∪ string", "int", true},
+ {"~int ∪ string", "myInt", true},
+ {"~int ∪ myInt ∪ ∅", "myInt", true},
+ {"myInt ∪ ∅ ∪ 𝓤", "int", true},
+ } {
+ xl := maketl(test.xl)
+ yl := testTerm(test.typ).typ
+ got := xl.includes(yl)
+ if got != test.want {
+ t.Errorf("(%v).includes(%v) = %v; want %v", test.xl, yl, got, test.want)
+ }
+ }
+}
+
+func TestTermlistSupersetOf(t *testing.T) {
+ for _, test := range []struct {
+ xl, typ string
+ want bool
+ }{
+ {"∅", "∅", true},
+ {"∅", "𝓤", false},
+ {"∅", "int", false},
+ {"𝓤", "∅", true},
+ {"𝓤", "𝓤", true},
+ {"𝓤", "int", true},
+ {"𝓤", "~int", true},
+ {"𝓤", "myInt", true},
+ {"~int", "int", true},
+ {"~int", "~int", true},
+ {"~int", "myInt", true},
+ {"int", "~int", false},
+ {"myInt", "~int", false},
+ {"int", "string", false},
+ {"~int", "string", false},
+ {"int ∪ string", "string", true},
+ {"int ∪ string", "~string", false},
+ {"~int ∪ string", "int", true},
+ {"~int ∪ string", "myInt", true},
+ {"~int ∪ string ∪ ∅", "string", true},
+ {"~string ∪ ∅ ∪ 𝓤", "myInt", true},
+ } {
+ xl := maketl(test.xl)
+ y := testTerm(test.typ)
+ got := xl.supersetOf(y)
+ if got != test.want {
+ t.Errorf("(%v).supersetOf(%v) = %v; want %v", test.xl, y, got, test.want)
+ }
+ }
+}
+
+func TestTermlistSubsetOf(t *testing.T) {
+ for _, test := range []struct {
+ xl, yl string
+ want bool
+ }{
+ {"∅", "∅", true},
+ {"∅", "𝓤", true},
+ {"𝓤", "∅", false},
+ {"𝓤", "𝓤", true},
+ {"int", "int ∪ string", true},
+ {"~int", "int ∪ string", false},
+ {"~int", "myInt ∪ string", false},
+ {"myInt", "~int ∪ string", true},
+ {"~int", "string ∪ string ∪ int ∪ ~int", true},
+ {"myInt", "string ∪ string ∪ ~int", true},
+ {"int ∪ string", "string", false},
+ {"int ∪ string", "string ∪ int", true},
+ {"int ∪ ~string", "string ∪ int", false},
+ {"myInt ∪ ~string", "string ∪ int ∪ 𝓤", true},
+ {"int ∪ ~string", "string ∪ int ∪ ∅ ∪ string", false},
+ {"int ∪ myInt", "string ∪ ~int ∪ ∅ ∪ string", true},
+ } {
+ xl := maketl(test.xl)
+ yl := maketl(test.yl)
+ got := xl.subsetOf(yl)
+ if got != test.want {
+ t.Errorf("(%v).subsetOf(%v) = %v; want %v", test.xl, test.yl, got, test.want)
+ }
+ }
+}
diff --git a/src/cmd/compile/internal/types2/testdata/check/builtins.go2 b/src/cmd/compile/internal/types2/testdata/check/builtins.go2
index 3918d836b5..0cfea93bf6 100644
--- a/src/cmd/compile/internal/types2/testdata/check/builtins.go2
+++ b/src/cmd/compile/internal/types2/testdata/check/builtins.go2
@@ -6,48 +6,222 @@
package builtins
-type Bmc interface {
- type map[rune]string, chan int
+import "unsafe"
+
+// close
+
+type C0 interface{ int }
+type C1 interface{ chan int }
+type C2 interface{ chan int | <-chan int }
+type C3 interface{ chan int | chan float32 }
+type C4 interface{ chan int | chan<- int }
+type C5[T any] interface{ ~chan T | chan<- T }
+
+func _[T any](ch T) {
+ close(ch /* ERROR cannot close non-channel */)
+}
+
+func _[T C0](ch T) {
+ close(ch /* ERROR cannot close non-channel */)
+}
+
+func _[T C1](ch T) {
+ close(ch)
+}
+
+func _[T C2](ch T) {
+ close(ch /* ERROR cannot close receive-only channel */)
+}
+
+func _[T C3](ch T) {
+ close(ch)
+}
+
+func _[T C4](ch T) {
+ close(ch)
+}
+
+func _[T C5[X], X any](ch T) {
+ close(ch)
+}
+
+// delete
+
+type M0 interface{ int }
+type M1 interface{ map[string]int }
+type M2 interface { map[string]int | map[string]float64 }
+type M3 interface{ map[string]int | map[rune]int }
+type M4[K comparable, V any] interface{ map[K]V | map[rune]V }
+
+func _[T any](m T) {
+ delete(m /* ERROR not a map */, "foo")
}
-type Bms interface {
- type map[string]int, []int
+func _[T M0](m T) {
+ delete(m /* ERROR not a map */, "foo")
}
-type Bcs interface {
- type chan bool, []float64
+func _[T M1](m T) {
+ delete(m, "foo")
}
-type Bss interface {
- type []int, []string
+func _[T M2](m T) {
+ delete(m, "foo")
+ delete(m, 0 /* ERROR cannot use .* as string */)
}
-func _[T any] () {
- _ = make(T /* ERROR invalid argument */ )
- _ = make(T /* ERROR invalid argument */ , 10)
- _ = make(T /* ERROR invalid argument */ , 10, 20)
+func _[T M3](m T) {
+ delete(m /* ERROR must have identical key types */, "foo")
}
-func _[T Bmc] () {
- _ = make(T)
- _ = make(T, 10)
- _ = make /* ERROR expects 1 or 2 arguments */ (T, 10, 20)
+func _[T M4[rune, V], V any](m T) {
+ delete(m, 'k')
}
-func _[T Bms] () {
- _ = make /* ERROR expects 2 arguments */ (T)
- _ = make(T, 10)
- _ = make /* ERROR expects 2 arguments */ (T, 10, 20)
+func _[T M4[K, V], K comparable, V any](m T) {
+ delete(m /* ERROR must have identical key types */, "foo")
}
-func _[T Bcs] () {
- _ = make /* ERROR expects 2 arguments */ (T)
- _ = make(T, 10)
- _ = make /* ERROR expects 2 arguments */ (T, 10, 20)
+// make
+
+func _[
+ S1 interface{ []int },
+ S2 interface{ []int | chan int },
+
+ M1 interface{ map[string]int },
+ M2 interface{ map[string]int | chan int },
+
+ C1 interface{ chan int },
+ C2 interface{ chan int | chan string },
+]() {
+ type S0 []int
+ _ = make([]int, 10)
+ _ = make(S0, 10)
+ _ = make(S1, 10)
+ _ = make /* ERROR not enough arguments */ ()
+ _ = make /* ERROR expects 2 or 3 arguments */ (S1)
+ _ = make(S1, 10, 20)
+ _ = make /* ERROR expects 2 or 3 arguments */ (S1, 10, 20, 30)
+ _ = make(S2 /* ERROR cannot make .* no structural type */ , 10)
+
+ type M0 map[string]int
+ _ = make(map[string]int)
+ _ = make(M0)
+ _ = make(M1)
+ _ = make(M1, 10)
+ _ = make/* ERROR expects 1 or 2 arguments */(M1, 10, 20)
+ _ = make(M2 /* ERROR cannot make .* no structural type */ )
+
+ type C0 chan int
+ _ = make(chan int)
+ _ = make(C0)
+ _ = make(C1)
+ _ = make(C1, 10)
+ _ = make/* ERROR expects 1 or 2 arguments */(C1, 10, 20)
+ _ = make(C2 /* ERROR cannot make .* no structural type */ )
}
-func _[T Bss] () {
- _ = make /* ERROR expects 2 or 3 arguments */ (T)
- _ = make(T, 10)
- _ = make(T, 10, 20)
+// unsafe.Alignof
+
+func _[T comparable]() {
+ var (
+ b int64
+ a [10]T
+ s struct{ f T }
+ p *T
+ l []T
+ f func(T)
+ i interface{ m() T }
+ c chan T
+ m map[T]T
+ t T
+ )
+
+ const bb = unsafe.Alignof(b)
+ assert(bb == 8)
+ const _ = unsafe /* ERROR not constant */ .Alignof(a)
+ const _ = unsafe /* ERROR not constant */ .Alignof(s)
+ const pp = unsafe.Alignof(p)
+ assert(pp == 8)
+ const ll = unsafe.Alignof(l)
+ assert(ll == 8)
+ const ff = unsafe.Alignof(f)
+ assert(ff == 8)
+ const ii = unsafe.Alignof(i)
+ assert(ii == 8)
+ const cc = unsafe.Alignof(c)
+ assert(cc == 8)
+ const mm = unsafe.Alignof(m)
+ assert(mm == 8)
+ const _ = unsafe /* ERROR not constant */ .Alignof(t)
+}
+
+// unsafe.Offsetof
+
+func _[T comparable]() {
+ var (
+ b struct{ _, f int64 }
+ a struct{ _, f [10]T }
+ s struct{ _, f struct{ f T } }
+ p struct{ _, f *T }
+ l struct{ _, f []T }
+ f struct{ _, f func(T) }
+ i struct{ _, f interface{ m() T } }
+ c struct{ _, f chan T }
+ m struct{ _, f map[T]T }
+ t struct{ _, f T }
+ )
+
+ const bb = unsafe.Offsetof(b.f)
+ assert(bb == 8)
+ const _ = unsafe /* ERROR not constant */ .Alignof(a)
+ const _ = unsafe /* ERROR not constant */ .Alignof(s)
+ const pp = unsafe.Offsetof(p.f)
+ assert(pp == 8)
+ const ll = unsafe.Offsetof(l.f)
+ assert(ll == 24)
+ const ff = unsafe.Offsetof(f.f)
+ assert(ff == 8)
+ const ii = unsafe.Offsetof(i.f)
+ assert(ii == 16)
+ const cc = unsafe.Offsetof(c.f)
+ assert(cc == 8)
+ const mm = unsafe.Offsetof(m.f)
+ assert(mm == 8)
+ const _ = unsafe /* ERROR not constant */ .Alignof(t)
+}
+
+// unsafe.Sizeof
+
+func _[T comparable]() {
+ var (
+ b int64
+ a [10]T
+ s struct{ f T }
+ p *T
+ l []T
+ f func(T)
+ i interface{ m() T }
+ c chan T
+ m map[T]T
+ t T
+ )
+
+ const bb = unsafe.Sizeof(b)
+ assert(bb == 8)
+ const _ = unsafe /* ERROR not constant */ .Alignof(a)
+ const _ = unsafe /* ERROR not constant */ .Alignof(s)
+ const pp = unsafe.Sizeof(p)
+ assert(pp == 8)
+ const ll = unsafe.Sizeof(l)
+ assert(ll == 24)
+ const ff = unsafe.Sizeof(f)
+ assert(ff == 8)
+ const ii = unsafe.Sizeof(i)
+ assert(ii == 16)
+ const cc = unsafe.Sizeof(c)
+ assert(cc == 8)
+ const mm = unsafe.Sizeof(m)
+ assert(mm == 8)
+ const _ = unsafe /* ERROR not constant */ .Alignof(t)
}
diff --git a/src/cmd/compile/internal/types2/testdata/check/builtins.src b/src/cmd/compile/internal/types2/testdata/check/builtins.src
index 6d1f47129b..17e4068d65 100644
--- a/src/cmd/compile/internal/types2/testdata/check/builtins.src
+++ b/src/cmd/compile/internal/types2/testdata/check/builtins.src
@@ -144,7 +144,7 @@ func close1() {
var r <-chan int
close() // ERROR not enough arguments
close(1, 2) // ERROR too many arguments
- close(42 /* ERROR not a channel */)
+ close(42 /* ERROR cannot close non-channel */)
close(r /* ERROR receive-only channel */)
close(c)
_ = close /* ERROR used as value */ (c)
diff --git a/src/cmd/compile/internal/types2/testdata/check/const0.src b/src/cmd/compile/internal/types2/testdata/check/const0.src
index 5608b1549b..3cffdf904c 100644
--- a/src/cmd/compile/internal/types2/testdata/check/const0.src
+++ b/src/cmd/compile/internal/types2/testdata/check/const0.src
@@ -27,7 +27,7 @@ const (
ub1 = true
ub2 = 2 < 1
ub3 = ui1 == uf1
- ub4 = true /* ERROR "cannot convert" */ == 0
+ ub4 = true /* ERROR "mismatched types untyped bool and untyped int" */ == 0
// integer values
ui0 = 0
diff --git a/src/cmd/compile/internal/types2/testdata/check/cycles4.src b/src/cmd/compile/internal/types2/testdata/check/cycles4.src
index 445babca68..924aabf475 100644
--- a/src/cmd/compile/internal/types2/testdata/check/cycles4.src
+++ b/src/cmd/compile/internal/types2/testdata/check/cycles4.src
@@ -4,6 +4,8 @@
package p
+import "unsafe"
+
// Check that all methods of T are collected before
// determining the result type of m (which embeds
// all methods of T).
@@ -13,7 +15,7 @@ type T interface {
E
}
-var _ = T.m(nil).m().e()
+var _ int = T.m(nil).m().e()
type E interface {
e() int
@@ -22,7 +24,7 @@ type E interface {
// Check that unresolved forward chains are followed
// (see also comment in resolver.go, checker.typeDecl).
-var _ = C.m(nil).m().e()
+var _ int = C.m(nil).m().e()
type A B
@@ -108,3 +110,12 @@ type Element interface {
type Event interface {
Target() Element
}
+
+// Check that accessing an interface method too early doesn't lead
+// to follow-on errors due to an incorrectly computed type set.
+
+type T8 interface {
+ m() [unsafe.Sizeof(T8.m /* ERROR undefined */ )]int
+}
+
+var _ = T8.m // no error expected here
diff --git a/src/cmd/compile/internal/types2/testdata/check/decls0.src b/src/cmd/compile/internal/types2/testdata/check/decls0.src
index e78d8867e0..f051a4f2ac 100644
--- a/src/cmd/compile/internal/types2/testdata/check/decls0.src
+++ b/src/cmd/compile/internal/types2/testdata/check/decls0.src
@@ -4,7 +4,7 @@
// type declarations
-package decls0
+package go1_17 // don't permit non-interface elements in interfaces
import "unsafe"
@@ -185,10 +185,10 @@ func f2(x *f2 /* ERROR "not a type" */ ) {}
func f3() (x f3 /* ERROR "not a type" */ ) { return }
func f4() (x *f4 /* ERROR "not a type" */ ) { return }
-func (S0) m1(x S0 /* ERROR value .* is not a type */ .m1) {}
-func (S0) m2(x *S0 /* ERROR value .* is not a type */ .m2) {}
-func (S0) m3() (x S0 /* ERROR value .* is not a type */ .m3) { return }
-func (S0) m4() (x *S0 /* ERROR value .* is not a type */ .m4) { return }
+func (S0) m1(x S0 /* ERROR illegal cycle in method declaration */ .m1) {}
+func (S0) m2(x *S0 /* ERROR illegal cycle in method declaration */ .m2) {}
+func (S0) m3() (x S0 /* ERROR illegal cycle in method declaration */ .m3) { return }
+func (S0) m4() (x *S0 /* ERROR illegal cycle in method declaration */ .m4) { return }
// interfaces may not have any blank methods
type BlankI interface {
diff --git a/src/cmd/compile/internal/types2/testdata/check/decls1.src b/src/cmd/compile/internal/types2/testdata/check/decls1.src
index e6beb78358..1167ced366 100644
--- a/src/cmd/compile/internal/types2/testdata/check/decls1.src
+++ b/src/cmd/compile/internal/types2/testdata/check/decls1.src
@@ -83,7 +83,7 @@ var (
// Constant expression initializations
var (
- v1 = 1 /* ERROR "cannot convert" */ + "foo"
+ v1 = 1 /* ERROR "mismatched types untyped int and untyped string" */ + "foo"
v2 = c + 255
v3 = c + 256 /* ERROR "overflows" */
v4 = r + 2147483647
diff --git a/src/cmd/compile/internal/types2/testdata/check/expr1.src b/src/cmd/compile/internal/types2/testdata/check/expr1.src
index 4ead815158..85ad234bbb 100644
--- a/src/cmd/compile/internal/types2/testdata/check/expr1.src
+++ b/src/cmd/compile/internal/types2/testdata/check/expr1.src
@@ -111,10 +111,10 @@ type mystring string
func _(x, y string, z mystring) {
x = x + "foo"
x = x /* ERROR not defined */ - "foo"
- x = x + 1 // ERROR cannot convert
+ x = x + 1 // ERROR mismatched types string and untyped int
x = x + y
x = x /* ERROR not defined */ - y
- x = x * 10 // ERROR cannot convert
+ x = x * 10 // ERROR mismatched types string and untyped int
}
func f() (a, b int) { return }
diff --git a/src/cmd/compile/internal/types2/testdata/check/expr2.src b/src/cmd/compile/internal/types2/testdata/check/expr2.src
index 0c959e8011..f9726b5de5 100644
--- a/src/cmd/compile/internal/types2/testdata/check/expr2.src
+++ b/src/cmd/compile/internal/types2/testdata/check/expr2.src
@@ -10,7 +10,7 @@ func _bool() {
const t = true == true
const f = true == false
_ = t /* ERROR "cannot compare" */ < f
- _ = 0 /* ERROR "cannot convert" */ == t
+ _ = 0 /* ERROR "mismatched types untyped int and untyped bool" */ == t
var b bool
var x, y float32
b = x < y
diff --git a/src/cmd/compile/internal/types2/testdata/check/expr3.src b/src/cmd/compile/internal/types2/testdata/check/expr3.src
index eab3f72c4d..fd28421dc8 100644
--- a/src/cmd/compile/internal/types2/testdata/check/expr3.src
+++ b/src/cmd/compile/internal/types2/testdata/check/expr3.src
@@ -104,7 +104,7 @@ func indexes() {
var ok mybool
_, ok = m["bar"]
_ = ok
- _ = m[0 /* ERROR "cannot use 0" */ ] + "foo" // ERROR "cannot convert"
+ _ = m[0 /* ERROR "cannot use 0" */ ] + "foo" // ERROR "mismatched types int and untyped string"
var t string
_ = t[- /* ERROR "negative" */ 1]
diff --git a/src/cmd/compile/internal/types2/testdata/check/issues.go2 b/src/cmd/compile/internal/types2/testdata/check/issues.go2
index 1c73b5da92..effc2db7ae 100644
--- a/src/cmd/compile/internal/types2/testdata/check/issues.go2
+++ b/src/cmd/compile/internal/types2/testdata/check/issues.go2
@@ -24,25 +24,23 @@ func _() {
eql[io.Reader](nil, nil)
}
-// If we have a receiver of pointer type (below: *T) we must ignore
-// the pointer in the implementation of the method lookup because
-// the type bound of T is an interface and pointer to interface types
-// have no methods and then the lookup would fail.
+// If we have a receiver of pointer to type parameter type (below: *T)
+// we don't have any methods, like for interfaces.
type C[T any] interface {
m()
}
// using type bound C
func _[T C[T]](x *T) {
- x.m()
+ x.m /* ERROR x\.m undefined */ ()
}
// using an interface literal as bound
func _[T interface{ m() }](x *T) {
- x.m()
+ x.m /* ERROR x\.m undefined */ ()
}
-func f2[_ interface{ m1(); m2() }]()
+func f2[_ interface{ m1(); m2() }]() {}
type T struct{}
func (T) m1()
@@ -57,15 +55,15 @@ func _() {
// type with a type list constraint, all of the type argument's types in its
// bound, but at least one (!), must be in the type list of the bound of the
// corresponding parameterized type's type parameter.
-type T1[P interface{type uint}] struct{}
+type T1[P interface{~uint}] struct{}
func _[P any]() {
- _ = T1[P /* ERROR P has no type constraints */ ]{}
+ _ = T1[P /* ERROR P has no constraints */ ]{}
}
// This is the original (simplified) program causing the same issue.
type Unsigned interface {
- type uint
+ ~uint
}
type T2[U Unsigned] struct {
@@ -76,8 +74,8 @@ func (u T2[U]) Add1() U {
return u.s + 1
}
-func NewT2[U any]() T2[U /* ERROR U has no type constraints */ ] {
- return T2[U /* ERROR U has no type constraints */ ]{}
+func NewT2[U any]() T2[U /* ERROR U has no constraints */ ] {
+ return T2[U /* ERROR U has no constraints */ ]{}
}
func _() {
@@ -156,7 +154,7 @@ type inf2[T any] struct{ inf2 /* ERROR illegal cycle */ [T] }
// predicate disjunction in the implementation was wrong because if a type list
// contains both an integer and a floating-point type, the type parameter is
// neither an integer nor a floating-point number.
-func convert[T1, T2 interface{type int, uint, float32}](v T1) T2 {
+func convert[T1, T2 interface{~int | ~uint | ~float32}](v T1) T2 {
return T2(v)
}
@@ -168,12 +166,12 @@ func _() {
// both numeric, or both strings. The implementation had the same problem
// with this check as the conversion issue above (issue #39623).
-func issue39623[T interface{type int, string}](x, y T) T {
+func issue39623[T interface{~int | ~string}](x, y T) T {
return x + y
}
// Simplified, from https://go2goplay.golang.org/p/efS6x6s-9NI:
-func Sum[T interface{type int, string}](s []T) (sum T) {
+func Sum[T interface{~int | ~string}](s []T) (sum T) {
for _, v := range s {
sum += v
}
@@ -182,19 +180,19 @@ func Sum[T interface{type int, string}](s []T) (sum T) {
// Assignability of an unnamed pointer type to a type parameter that
// has a matching underlying type.
-func _[T interface{}, PT interface{type *T}] (x T) PT {
+func _[T interface{}, PT interface{~*T}] (x T) PT {
return &x
}
// Indexing of generic types containing type parameters in their type list:
-func at[T interface{ type []E }, E interface{}](x T, i int) E {
+func at[T interface{ ~[]E }, E interface{}](x T, i int) E {
return x[i]
}
// A generic type inside a function acts like a named type. Its underlying
// type is itself, its "operational type" is defined by the type list in
// the type bound, if any.
-func _[T interface{type int}](x T) {
+func _[T interface{~int}](x T) {
type myint int
var _ int = int(x)
var _ T = 42
@@ -203,24 +201,24 @@ func _[T interface{type int}](x T) {
// Indexing a generic type with an array type bound checks length.
// (Example by mdempsky@.)
-func _[T interface { type [10]int }](x T) {
+func _[T interface { ~[10]int }](x T) {
_ = x[9] // ok
_ = x[20 /* ERROR out of bounds */ ]
}
// Pointer indirection of a generic type.
-func _[T interface{ type *int }](p T) int {
+func _[T interface{ ~*int }](p T) int {
return *p
}
// Channel sends and receives on generic types.
-func _[T interface{ type chan int }](ch T) int {
+func _[T interface{ ~chan int }](ch T) int {
ch <- 0
return <- ch
}
// Calling of a generic variable.
-func _[T interface{ type func() }](f T) {
+func _[T interface{ ~func() }](f T) {
f()
go f()
}
@@ -232,9 +230,9 @@ func _[T interface{ type func() }](f T) {
// type parameter that was substituted with a defined type.
// Test case from an (originally) failing example.
-type sliceOf[E any] interface{ type []E }
+type sliceOf[E any] interface{ ~[]E }
-func append[T interface{}, S sliceOf[T], T2 interface{ type T }](s S, t ...T2) S
+func append[T interface{}, S sliceOf[T], T2 interface{}](s S, t ...T2) S { panic(0) }
var f func()
var cancelSlice []context.CancelFunc
@@ -242,7 +240,7 @@ var _ = append[context.CancelFunc, []context.CancelFunc, context.CancelFunc](can
// A generic function must be instantiated with a type, not a value.
-func g[T any](T) T
+func g[T any](T) T { panic(0) }
var _ = g[int]
var _ = g[nil /* ERROR is not a type */ ]
diff --git a/src/cmd/compile/internal/types2/testdata/check/issues.src b/src/cmd/compile/internal/types2/testdata/check/issues.src
index 21aa208cc7..692ed37ef4 100644
--- a/src/cmd/compile/internal/types2/testdata/check/issues.src
+++ b/src/cmd/compile/internal/types2/testdata/check/issues.src
@@ -2,11 +2,11 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package issues
+package go1_17 // don't permit non-interface elements in interfaces
import (
"fmt"
- syn "cmd/compile/internal/syntax"
+ syn "regexp/syntax"
t1 "text/template"
t2 "html/template"
)
@@ -329,10 +329,10 @@ func (... /* ERROR can only use ... with final parameter in list */ TT) f()
func issue28281g() (... /* ERROR can only use ... with final parameter in list */ TT)
// Issue #26234: Make various field/method lookup errors easier to read by matching cmd/compile's output
-func issue26234a(f *syn.File) {
+func issue26234a(f *syn.Prog) {
// The error message below should refer to the actual package name (syntax)
// not the local package name (syn).
- f.foo /* ERROR f.foo undefined \(type \*syntax.File has no field or method foo\) */
+ f.foo /* ERROR f\.foo undefined \(type \*syntax\.Prog has no field or method foo\) */
}
type T struct {
@@ -357,11 +357,11 @@ func issue35895() {
var _ T = 0 // ERROR cannot use 0 \(untyped int constant\) as T
// There is only one package with name syntax imported, only use the (global) package name in error messages.
- var _ *syn.File = 0 // ERROR cannot use 0 \(untyped int constant\) as \*syntax.File
+ var _ *syn.Prog = 0 // ERROR cannot use 0 \(untyped int constant\) as \*syntax.Prog
// Because both t1 and t2 have the same global package name (template),
// qualify packages with full path name in this case.
- var _ t1.Template = t2 /* ERROR cannot use .* \(value of type "html/template".Template\) as "text/template".Template */ .Template{}
+ var _ t1.Template = t2 /* ERROR cannot use .* \(value of type .html/template.\.Template\) as .text/template.\.Template */ .Template{}
}
func issue42989(s uint) {
diff --git a/src/cmd/compile/internal/types2/testdata/check/linalg.go2 b/src/cmd/compile/internal/types2/testdata/check/linalg.go2
index 0d27603a58..efc090a1d1 100644
--- a/src/cmd/compile/internal/types2/testdata/check/linalg.go2
+++ b/src/cmd/compile/internal/types2/testdata/check/linalg.go2
@@ -9,10 +9,10 @@ import "math"
// Numeric is type bound that matches any numeric type.
// It would likely be in a constraints package in the standard library.
type Numeric interface {
- type int, int8, int16, int32, int64,
- uint, uint8, uint16, uint32, uint64, uintptr,
- float32, float64,
- complex64, complex128
+ ~int | ~int8 | ~int16 | ~int32 | ~int64 |
+ ~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr |
+ ~float32 | ~float64 |
+ ~complex64 | ~complex128
}
func DotProduct[T Numeric](s1, s2 []T) T {
@@ -42,14 +42,14 @@ func AbsDifference[T NumericAbs[T]](a, b T) T {
// OrderedNumeric is a type bound that matches numeric types that support the < operator.
type OrderedNumeric interface {
- type int, int8, int16, int32, int64,
- uint, uint8, uint16, uint32, uint64, uintptr,
- float32, float64
+ ~int | ~int8 | ~int16 | ~int32 | ~int64 |
+ ~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr |
+ ~float32 | ~float64
}
// Complex is a type bound that matches the two complex types, which do not have a < operator.
type Complex interface {
- type complex64, complex128
+ ~complex64 | ~complex128
}
// OrderedAbs is a helper type that defines an Abs method for
diff --git a/src/cmd/compile/internal/types2/testdata/check/map2.go2 b/src/cmd/compile/internal/types2/testdata/check/map2.go2
index 2833445662..be2c49f621 100644
--- a/src/cmd/compile/internal/types2/testdata/check/map2.go2
+++ b/src/cmd/compile/internal/types2/testdata/check/map2.go2
@@ -114,7 +114,7 @@ func (it *Iterator[K, V]) Next() (K, V, bool) {
// chans
-func chans_Ranger[T any]() (*chans_Sender[T], *chans_Receiver[T])
+func chans_Ranger[T any]() (*chans_Sender[T], *chans_Receiver[T]) { panic(0) }
// A sender is used to send values to a Receiver.
type chans_Sender[T any] struct {
diff --git a/src/cmd/compile/internal/types2/testdata/check/mtypeparams.go2 b/src/cmd/compile/internal/types2/testdata/check/mtypeparams.go2
index c2f282bae1..1b406593f8 100644
--- a/src/cmd/compile/internal/types2/testdata/check/mtypeparams.go2
+++ b/src/cmd/compile/internal/types2/testdata/check/mtypeparams.go2
@@ -10,7 +10,7 @@ package p
type S struct{}
-func (S) m[T any](v T)
+func (S) m[T any](v T) {}
// TODO(gri) Once we collect interface method type parameters
// in the parser, we can enable these tests again.
diff --git a/src/cmd/compile/internal/types2/testdata/check/stmt0.src b/src/cmd/compile/internal/types2/testdata/check/stmt0.src
index bedcbe5fce..d744f2ba81 100644
--- a/src/cmd/compile/internal/types2/testdata/check/stmt0.src
+++ b/src/cmd/compile/internal/types2/testdata/check/stmt0.src
@@ -49,18 +49,18 @@ func assignments1() {
b = true
i += 1
- i += "foo" /* ERROR "cannot convert.*int" */
+ i += "foo" /* ERROR "mismatched types int and untyped string" */
f -= 1
f /= 0
f = float32(0)/0 /* ERROR "division by zero" */
- f -= "foo" /* ERROR "cannot convert.*float64" */
+ f -= "foo" /* ERROR "mismatched types float64 and untyped string" */
c *= 1
c /= 0
s += "bar"
- s += 1 /* ERROR "cannot convert.*string" */
+ s += 1 /* ERROR "mismatched types string and untyped int" */
var u64 uint64
u64 += 1<<u64
@@ -937,13 +937,13 @@ func issue6766b() {
// errors reported).
func issue10148() {
for y /* ERROR declared but not used */ := range "" {
- _ = "" /* ERROR cannot convert */ + 1
+ _ = "" /* ERROR mismatched types untyped string and untyped int*/ + 1
}
for range 1 /* ERROR cannot range over 1 */ {
- _ = "" /* ERROR cannot convert */ + 1
+ _ = "" /* ERROR mismatched types untyped string and untyped int*/ + 1
}
for y := range 1 /* ERROR cannot range over 1 */ {
- _ = "" /* ERROR cannot convert */ + 1
+ _ = "" /* ERROR mismatched types untyped string and untyped int*/ + 1
}
}
diff --git a/src/cmd/compile/internal/types2/testdata/check/tinference.go2 b/src/cmd/compile/internal/types2/testdata/check/tinference.go2
index a53fde0a2a..0afb77c1e4 100644
--- a/src/cmd/compile/internal/types2/testdata/check/tinference.go2
+++ b/src/cmd/compile/internal/types2/testdata/check/tinference.go2
@@ -8,36 +8,38 @@ import "strconv"
type any interface{}
-func f0[A any, B interface{type C}, C interface{type D}, D interface{type A}](A, B, C, D)
-func _() {
- f := f0[string]
- f("a", "b", "c", "d")
- f0("a", "b", "c", "d")
-}
-
-func f1[A any, B interface{type A}](A, B)
-func _() {
- f := f1[int]
- f(int(0), int(0))
- f1(int(0), int(0))
-}
-
-func f2[A any, B interface{type []A}](A, B)
+// Embedding stand-alone type parameters is not permitted for now. Disabled.
+// func f0[A any, B interface{~C}, C interface{~D}, D interface{~A}](A, B, C, D)
+// func _() {
+// f := f0[string]
+// f("a", "b", "c", "d")
+// f0("a", "b", "c", "d")
+// }
+//
+// func f1[A any, B interface{~A}](A, B)
+// func _() {
+// f := f1[int]
+// f(int(0), int(0))
+// f1(int(0), int(0))
+// }
+
+func f2[A any, B interface{~[]A}](A, B) {}
func _() {
f := f2[byte]
f(byte(0), []byte{})
f2(byte(0), []byte{})
}
-func f3[A any, B interface{type C}, C interface{type *A}](A, B, C)
-func _() {
- f := f3[int]
- var x int
- f(x, &x, &x)
- f3(x, &x, &x)
-}
+// Embedding stand-alone type parameters is not permitted for now. Disabled.
+// func f3[A any, B interface{~C}, C interface{~*A}](A, B, C)
+// func _() {
+// f := f3[int]
+// var x int
+// f(x, &x, &x)
+// f3(x, &x, &x)
+// }
-func f4[A any, B interface{type []C}, C interface{type *A}](A, B, C)
+func f4[A any, B interface{~[]C}, C interface{~*A}](A, B, C) {}
func _() {
f := f4[int]
var x int
@@ -45,14 +47,14 @@ func _() {
f4(x, []*int{}, &x)
}
-func f5[A interface{type struct{b B; c C}}, B any, C interface{type *B}](x B) A
+func f5[A interface{~struct{b B; c C}}, B any, C interface{~*B}](x B) A { panic(0) }
func _() {
x := f5(1.2)
var _ float64 = x.b
var _ float64 = *x.c
}
-func f6[A any, B interface{type struct{f []A}}](B) A
+func f6[A any, B interface{~struct{f []A}}](B) A { panic(0) }
func _() {
x := f6(struct{f []string}{})
var _ string = x
@@ -60,11 +62,11 @@ func _() {
// TODO(gri) Need to flag invalid recursive constraints. At the
// moment these cause infinite recursions and stack overflow.
-// func f7[A interface{type B}, B interface{type A}]()
+// func f7[A interface{type B}, B interface{~A}]()
// More realistic examples
-func Double[S interface{ type []E }, E interface{ type int, int8, int16, int32, int64 }](s S) S {
+func Double[S interface{ ~[]E }, E interface{ ~int | ~int8 | ~int16 | ~int32 | ~int64 }](s S) S {
r := make(S, len(s))
for i, v := range s {
r[i] = v + v
@@ -80,7 +82,7 @@ var _ = Double(MySlice{1})
type Setter[B any] interface {
Set(string)
- type *B
+ ~*B
}
func FromStrings[T interface{}, PT Setter[T]](s []string) []T {
diff --git a/src/cmd/compile/internal/types2/testdata/check/typeinst2.go2 b/src/cmd/compile/internal/types2/testdata/check/typeinst2.go2
index 6e2104a515..d087c26a47 100644
--- a/src/cmd/compile/internal/types2/testdata/check/typeinst2.go2
+++ b/src/cmd/compile/internal/types2/testdata/check/typeinst2.go2
@@ -85,7 +85,7 @@ type NumericAbs[T any] interface {
Abs() T
}
-func AbsDifference[T NumericAbs[T]](x T)
+func AbsDifference[T NumericAbs[T]](x T) { panic(0) }
type OrderedAbs[T any] T
@@ -97,7 +97,7 @@ func OrderedAbsDifference[T any](x T) {
// same code, reduced to essence
-func g[P interface{ m() P }](x P)
+func g[P interface{ m() P }](x P) { panic(0) }
type T4[P any] P
@@ -148,15 +148,15 @@ func _[T any](r R2[T, int], p *R2[string, T]) {
p.pm()
}
-// An interface can (explicitly) declare at most one type list.
+// It is ok to have multiple embedded unions.
type _ interface {
m0()
- type int, string, bool
- type /* ERROR multiple type lists */ float32, float64
+ ~int | ~string | ~bool
+ ~float32 | ~float64
m1()
m2()
- type /* ERROR multiple type lists */ complex64, complex128
- type /* ERROR multiple type lists */ rune
+ ~complex64 | ~complex128
+ ~rune
}
// Interface type lists may contain each type at most once.
@@ -164,23 +164,24 @@ type _ interface {
// for them to be all in a single list, and we report the error
// as well.)
type _ interface {
- type int, int /* ERROR duplicate type int */
- type /* ERROR multiple type lists */ int /* ERROR duplicate type int */
+ ~int|~int /* ERROR overlapping terms ~int */
+ ~int|int /* ERROR overlapping terms int */
+ int|int /* ERROR overlapping terms int */
}
type _ interface {
- type struct{f int}, struct{g int}, struct /* ERROR duplicate type */ {f int}
+ ~struct{f int} | ~struct{g int} | ~struct /* ERROR overlapping terms */ {f int}
}
// Interface type lists can contain any type, incl. *Named types.
// Verify that we use the underlying type to compute the operational type.
type MyInt int
-func add1[T interface{type MyInt}](x T) T {
+func add1[T interface{MyInt}](x T) T {
return x + 1
}
type MyString string
-func double[T interface{type MyInt, MyString}](x T) T {
+func double[T interface{MyInt|MyString}](x T) T {
return x + x
}
@@ -189,22 +190,22 @@ func double[T interface{type MyInt, MyString}](x T) T {
// type lists.
type E0 interface {
- type int, bool, string
+ ~int | ~bool | ~string
}
type E1 interface {
- type int, float64, string
+ ~int | ~float64 | ~string
}
type E2 interface {
- type float64
+ ~float64
}
type I0 interface {
E0
}
-func f0[T I0]()
+func f0[T I0]() {}
var _ = f0[int]
var _ = f0[bool]
var _ = f0[string]
@@ -215,7 +216,7 @@ type I01 interface {
E1
}
-func f01[T I01]()
+func f01[T I01]() {}
var _ = f01[int]
var _ = f01[bool /* ERROR does not satisfy I0 */ ]
var _ = f01[string]
@@ -227,7 +228,7 @@ type I012 interface {
E2
}
-func f012[T I012]()
+func f012[T I012]() {}
var _ = f012[int /* ERROR does not satisfy I012 */ ]
var _ = f012[bool /* ERROR does not satisfy I012 */ ]
var _ = f012[string /* ERROR does not satisfy I012 */ ]
@@ -238,7 +239,7 @@ type I12 interface {
E2
}
-func f12[T I12]()
+func f12[T I12]() {}
var _ = f12[int /* ERROR does not satisfy I12 */ ]
var _ = f12[bool /* ERROR does not satisfy I12 */ ]
var _ = f12[string /* ERROR does not satisfy I12 */ ]
@@ -246,10 +247,10 @@ var _ = f12[float64]
type I0_ interface {
E0
- type int
+ ~int
}
-func f0_[T I0_]()
+func f0_[T I0_]() {}
var _ = f0_[int]
var _ = f0_[bool /* ERROR does not satisfy I0_ */ ]
var _ = f0_[string /* ERROR does not satisfy I0_ */ ]
diff --git a/src/cmd/compile/internal/types2/testdata/check/typeparams.go2 b/src/cmd/compile/internal/types2/testdata/check/typeparams.go2
index badda01105..1ad80b1e1b 100644
--- a/src/cmd/compile/internal/types2/testdata/check/typeparams.go2
+++ b/src/cmd/compile/internal/types2/testdata/check/typeparams.go2
@@ -6,18 +6,18 @@ package p
// import "io" // for type assertion tests
-// The predeclared identifier "any" is only visible as a constraint
+// The predeclared identifier "any" can only be used as a constraint
// in a type parameter list.
-var _ any // ERROR undeclared
-func _[_ any /* ok here */ , _ interface{any /* ERROR undeclared */ }](any /* ERROR undeclared */ ) {
- var _ any /* ERROR undeclared */
+var _ any // ERROR cannot use any outside constraint position
+func _[_ any /* ok here */ , _ interface{any /* ERROR constraint */ }](any /* ERROR constraint */ ) {
+ var _ any /* ERROR constraint */
}
func identity[T any](x T) T { return x }
-func _[_ any](x int) int
-func _[T any](T /* ERROR redeclared */ T)()
-func _[T, T /* ERROR redeclared */ any]()
+func _[_ any](x int) int { panic(0) }
+func _[T any](T /* ERROR redeclared */ T)() {}
+func _[T, T /* ERROR redeclared */ any]() {}
// Constraints (incl. any) may be parenthesized.
func _[_ (any)]() {}
@@ -52,22 +52,22 @@ func swapswap[A, B any](a A, b B) (A, B) {
type F[A, B any] func(A, B) (B, A)
-func min[T interface{ type int }](x, y T) T {
+func min[T interface{ ~int }](x, y T) T {
if x < y {
return x
}
return y
}
-func _[T interface{type int, float32}](x, y T) bool { return x < y }
+func _[T interface{~int | ~float32}](x, y T) bool { return x < y }
func _[T any](x, y T) bool { return x /* ERROR cannot compare */ < y }
-func _[T interface{type int, float32, bool}](x, y T) bool { return x /* ERROR cannot compare */ < y }
+func _[T interface{~int | ~float32 | ~bool}](x, y T) bool { return x /* ERROR cannot compare */ < y }
func _[T C1[T]](x, y T) bool { return x /* ERROR cannot compare */ < y }
func _[T C2[T]](x, y T) bool { return x < y }
type C1[T any] interface{}
-type C2[T any] interface{ type int, float32 }
+type C2[T any] interface{ ~int | ~float32 }
func new[T any]() *T {
var x T
@@ -77,66 +77,71 @@ func new[T any]() *T {
var _ = new /* ERROR cannot use generic function new */
var _ *int = new[int]()
-func _[T any](map[T /* ERROR invalid map key type T \(missing comparable constraint\) */]int) // w/o constraint we don't know if T is comparable
+func _[T any](map[T /* ERROR invalid map key type T \(missing comparable constraint\) */]int) {} // w/o constraint we don't know if T is comparable
-func f1[T1 any](struct{T1}) int
+func f1[T1 any](struct{T1 /* ERROR cannot be a .* type parameter */ }) int { panic(0) }
var _ = f1[int](struct{T1}{})
type T1 = int
-func f2[t1 any](struct{t1; x float32}) int
+func f2[t1 any](struct{t1 /* ERROR cannot be a .* type parameter */ ; x float32}) int { panic(0) }
var _ = f2[t1](struct{t1; x float32}{})
type t1 = int
-func f3[A, B, C any](A, struct{x B}, func(A, struct{x B}, *C)) int
+func f3[A, B, C any](A, struct{x B}, func(A, struct{x B}, *C)) int { panic(0) }
var _ = f3[int, rune, bool](1, struct{x rune}{}, nil)
// indexing
func _[T any] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
-func _[T interface{ type int }] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
-func _[T interface{ type string }] (x T, i int) { _ = x[i] }
-func _[T interface{ type []int }] (x T, i int) { _ = x[i] }
-func _[T interface{ type [10]int, *[20]int, map[int]int }] (x T, i int) { _ = x[i] }
-func _[T interface{ type string, []byte }] (x T, i int) { _ = x[i] }
-func _[T interface{ type []int, [1]rune }] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
-func _[T interface{ type string, []rune }] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
-
-// indexing with various combinations of map types in type lists (see issue #42616)
-func _[T interface{ type []E, map[int]E }, E any](x T, i int) { _ = x[i] }
-func _[T interface{ type []E }, E any](x T, i int) { _ = &x[i] }
-func _[T interface{ type map[int]E }, E any](x T, i int) { _, _ = x[i] } // comma-ok permitted
-func _[T interface{ type []E, map[int]E }, E any](x T, i int) { _ = &x /* ERROR cannot take address */ [i] }
-func _[T interface{ type []E, map[int]E, map[uint]E }, E any](x T, i int) { _ = x /* ERROR cannot index */ [i] } // different map element types
-func _[T interface{ type []E, map[string]E }, E any](x T, i int) { _ = x[i /* ERROR cannot use i */ ] }
+func _[T interface{ ~int }] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
+func _[T interface{ ~string }] (x T, i int) { _ = x[i] }
+func _[T interface{ ~[]int }] (x T, i int) { _ = x[i] }
+func _[T interface{ ~[10]int | ~*[20]int | ~map[int]int }] (x T, i int) { _ = x /* ERROR cannot index */ [i] } // map and non-map types
+func _[T interface{ ~string | ~[]byte }] (x T, i int) { _ = x[i] }
+func _[T interface{ ~[]int | ~[1]rune }] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
+func _[T interface{ ~string | ~[]rune }] (x T, i int) { _ = x /* ERROR "cannot index" */ [i] }
+
+// indexing with various combinations of map types in type sets (see issue #42616)
+func _[T interface{ ~[]E | ~map[int]E }, E any](x T, i int) { _ = x /* ERROR cannot index */ [i] } // map and non-map types
+func _[T interface{ ~[]E }, E any](x T, i int) { _ = &x[i] }
+func _[T interface{ ~map[int]E }, E any](x T, i int) { _, _ = x[i] } // comma-ok permitted
+func _[T interface{ ~map[int]E }, E any](x T, i int) { _ = &x /* ERROR cannot take address */ [i] }
+func _[T interface{ ~map[int]E | ~map[uint]E }, E any](x T, i int) { _ = x /* ERROR cannot index */ [i] } // different map element types
+func _[T interface{ ~[]E | ~map[string]E }, E any](x T, i int) { _ = x /* ERROR cannot index */ [i] } // map and non-map types
+
+// indexing with various combinations of array and other types in type sets
+func _[T interface{ [10]int }](x T, i int) { _ = x[i]; _ = x[9]; _ = x[10 /* ERROR out of bounds */ ] }
+func _[T interface{ [10]byte | string }](x T, i int) { _ = x[i]; _ = x[9]; _ = x[10 /* ERROR out of bounds */ ] }
+func _[T interface{ [10]int | *[20]int | []int }](x T, i int) { _ = x[i]; _ = x[9]; _ = x[10 /* ERROR out of bounds */ ] }
// slicing
// TODO(gri) implement this
-func _[T interface{ type string }] (x T, i, j, k int) { _ = x /* ERROR invalid operation */ [i:j:k] }
+func _[T interface{ ~string }] (x T, i, j, k int) { _ = x /* ERROR generic slice expressions not yet implemented */ [i:j:k] }
// len/cap built-ins
func _[T any](x T) { _ = len(x /* ERROR invalid argument */ ) }
-func _[T interface{ type int }](x T) { _ = len(x /* ERROR invalid argument */ ) }
-func _[T interface{ type string, []byte, int }](x T) { _ = len(x /* ERROR invalid argument */ ) }
-func _[T interface{ type string }](x T) { _ = len(x) }
-func _[T interface{ type [10]int }](x T) { _ = len(x) }
-func _[T interface{ type []byte }](x T) { _ = len(x) }
-func _[T interface{ type map[int]int }](x T) { _ = len(x) }
-func _[T interface{ type chan int }](x T) { _ = len(x) }
-func _[T interface{ type string, []byte, chan int }](x T) { _ = len(x) }
+func _[T interface{ ~int }](x T) { _ = len(x /* ERROR invalid argument */ ) }
+func _[T interface{ ~string | ~[]byte | ~int }](x T) { _ = len(x /* ERROR invalid argument */ ) }
+func _[T interface{ ~string }](x T) { _ = len(x) }
+func _[T interface{ ~[10]int }](x T) { _ = len(x) }
+func _[T interface{ ~[]byte }](x T) { _ = len(x) }
+func _[T interface{ ~map[int]int }](x T) { _ = len(x) }
+func _[T interface{ ~chan int }](x T) { _ = len(x) }
+func _[T interface{ ~string | ~[]byte | ~chan int }](x T) { _ = len(x) }
func _[T any](x T) { _ = cap(x /* ERROR invalid argument */ ) }
-func _[T interface{ type int }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
-func _[T interface{ type string, []byte, int }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
-func _[T interface{ type string }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
-func _[T interface{ type [10]int }](x T) { _ = cap(x) }
-func _[T interface{ type []byte }](x T) { _ = cap(x) }
-func _[T interface{ type map[int]int }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
-func _[T interface{ type chan int }](x T) { _ = cap(x) }
-func _[T interface{ type []byte, chan int }](x T) { _ = cap(x) }
+func _[T interface{ ~int }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
+func _[T interface{ ~string | ~[]byte | ~int }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
+func _[T interface{ ~string }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
+func _[T interface{ ~[10]int }](x T) { _ = cap(x) }
+func _[T interface{ ~[]byte }](x T) { _ = cap(x) }
+func _[T interface{ ~map[int]int }](x T) { _ = cap(x /* ERROR invalid argument */ ) }
+func _[T interface{ ~chan int }](x T) { _ = cap(x) }
+func _[T interface{ ~[]byte | ~chan int }](x T) { _ = cap(x) }
// range iteration
@@ -144,64 +149,136 @@ func _[T interface{}](x T) {
for range x /* ERROR cannot range */ {}
}
-func _[T interface{ type string, []string }](x T) {
- for range x {}
- for i := range x { _ = i }
- for i, _ := range x { _ = i }
- for i, e := range x /* ERROR must have the same element type */ { _ = i }
- for _, e := range x /* ERROR must have the same element type */ {}
- var e rune
- _ = e
- for _, (e) = range x /* ERROR must have the same element type */ {}
-}
+type myString string
+func _[
+ B1 interface{ string },
+ B2 interface{ string | myString },
-func _[T interface{ type string, []rune, map[int]rune }](x T) {
- for _, e := range x { _ = e }
- for i, e := range x { _ = i; _ = e }
-}
+ C1 interface{ chan int },
+ C2 interface{ chan int | <-chan int },
+ C3 interface{ chan<- int },
-func _[T interface{ type string, []rune, map[string]rune }](x T) {
- for _, e := range x { _ = e }
- for i, e := range x /* ERROR must have the same key type */ { _ = e }
-}
+ S1 interface{ []int },
+ S2 interface{ []int | [10]int },
-func _[T interface{ type string, chan int }](x T) {
- for range x {}
- for i := range x { _ = i }
- for i, _ := range x { _ = i } // TODO(gri) should get an error here: channels only return one value
-}
+ A1 interface{ [10]int },
+ A2 interface{ [10]int | []int },
+
+ P1 interface{ *[10]int },
+ P2 interface{ *[10]int | *[]int },
+
+ M1 interface{ map[string]int },
+ M2 interface{ map[string]int | map[string]string },
+]() {
+ var b0 string
+ for range b0 {}
+ for _ = range b0 {}
+ for _, _ = range b0 {}
+
+ var b1 B1
+ for range b1 {}
+ for _ = range b1 {}
+ for _, _ = range b1 {}
+
+ var b2 B2
+ for range b2 /* ERROR cannot range over b2 .* no structural type */ {}
+
+ var c0 chan int
+ for range c0 {}
+ for _ = range c0 {}
+ for _, _ /* ERROR permits only one iteration variable */ = range c0 {}
+
+ var c1 C1
+ for range c1 {}
+ for _ = range c1 {}
+ for _, _ /* ERROR permits only one iteration variable */ = range c1 {}
+
+ var c2 C2
+ for range c2 /* ERROR cannot range over c2 .* no structural type */ {}
+
+ var c3 C3
+ for range c3 /* ERROR receive from send-only channel */ {}
+
+ var s0 []int
+ for range s0 {}
+ for _ = range s0 {}
+ for _, _ = range s0 {}
+
+ var s1 S1
+ for range s1 {}
+ for _ = range s1 {}
+ for _, _ = range s1 {}
+
+ var s2 S2
+ for range s2 /* ERROR cannot range over s2 .* no structural type */ {}
+
+ var a0 []int
+ for range a0 {}
+ for _ = range a0 {}
+ for _, _ = range a0 {}
+
+ var a1 A1
+ for range a1 {}
+ for _ = range a1 {}
+ for _, _ = range a1 {}
+
+ var a2 A2
+ for range a2 /* ERROR cannot range over a2 .* no structural type */ {}
+
+ var p0 *[10]int
+ for range p0 {}
+ for _ = range p0 {}
+ for _, _ = range p0 {}
+
+ var p1 P1
+ for range p1 {}
+ for _ = range p1 {}
+ for _, _ = range p1 {}
+
+ var p2 P2
+ for range p2 /* ERROR cannot range over p2 .* no structural type */ {}
+
+ var m0 map[string]int
+ for range m0 {}
+ for _ = range m0 {}
+ for _, _ = range m0 {}
+
+ var m1 M1
+ for range m1 {}
+ for _ = range m1 {}
+ for _, _ = range m1 {}
-func _[T interface{ type string, chan<-int }](x T) {
- for i := range x /* ERROR send-only channel */ { _ = i }
+ var m2 M2
+ for range m2 /* ERROR cannot range over m2 .* no structural type */ {}
}
// type inference checks
var _ = new() /* ERROR cannot infer T */
-func f4[A, B, C any](A, B) C
+func f4[A, B, C any](A, B) C { panic(0) }
var _ = f4(1, 2) /* ERROR cannot infer C */
var _ = f4[int, float32, complex128](1, 2)
-func f5[A, B, C any](A, []*B, struct{f []C}) int
+func f5[A, B, C any](A, []*B, struct{f []C}) int { panic(0) }
var _ = f5[int, float32, complex128](0, nil, struct{f []complex128}{})
var _ = f5(0, nil, struct{f []complex128}{}) // ERROR cannot infer
var _ = f5(0, []*float32{new[float32]()}, struct{f []complex128}{})
-func f6[A any](A, []A) int
+func f6[A any](A, []A) int { panic(0) }
var _ = f6(0, nil)
-func f6nil[A any](A) int
+func f6nil[A any](A) int { panic(0) }
var _ = f6nil(nil) // ERROR cannot infer
// type inference with variadic functions
-func f7[T any](...T) T
+func f7[T any](...T) T { panic(0) }
var _ int = f7() /* ERROR cannot infer T */
var _ int = f7(1)
@@ -214,7 +291,7 @@ var _ = f7(float64(1), 2.3)
var _ = f7(1, 2.3 /* ERROR does not match */ )
var _ = f7(1.2, 3 /* ERROR does not match */ )
-func f8[A, B any](A, B, ...B) int
+func f8[A, B any](A, B, ...B) int { panic(0) }
var _ = f8(1) /* ERROR not enough arguments */
var _ = f8(1, 2.3)
@@ -241,7 +318,7 @@ func (T) m3[P any]() {}
type S1[P any] struct { f P }
-func f9[P any](x S1[P])
+func f9[P any](x S1[P]) {}
func _() {
f9[int](S1[int]{42})
@@ -250,7 +327,7 @@ func _() {
type S2[A, B, C any] struct{}
-func f10[X, Y, Z any](a S2[X, int, Z], b S2[X, Y, bool])
+func f10[X, Y, Z any](a S2[X, int, Z], b S2[X, Y, bool]) {}
func _[P any]() {
f10[int, float32, string](S2[int, int, string]{}, S2[int, float32, bool]{})
@@ -261,7 +338,7 @@ func _[P any]() {
// corner case for type inference
// (was bug: after instantiating f11, the type-checker didn't mark f11 as non-generic)
-func f11[T any]()
+func f11[T any]() {}
func _() {
f11[int]()
@@ -269,7 +346,7 @@ func _() {
// the previous example was extracted from
-func f12[T interface{m() T}]()
+func f12[T interface{m() T}]() {}
type A[T any] T
@@ -297,15 +374,15 @@ func _[T any] (x T) {
type R0 struct{}
-func (R0) _[T any](x T)
-func (R0 /* ERROR invalid receiver */ ) _[R0 any]() // scope of type parameters starts at "func"
+func (R0) _[T any](x T) {}
+func (R0 /* ERROR invalid receiver */ ) _[R0 any]() {} // scope of type parameters starts at "func"
type R1[A, B any] struct{}
func (_ R1[A, B]) m0(A, B)
-func (_ R1[A, B]) m1[T any](A, B, T) T
+func (_ R1[A, B]) m1[T any](A, B, T) T { panic(0) }
func (_ R1 /* ERROR not a generic type */ [R1, _]) _()
-func (_ R1[A, B]) _[A /* ERROR redeclared */ any](B)
+func (_ R1[A, B]) _[A /* ERROR redeclared */ any](B) {}
func _() {
var r R1[int, string]
@@ -400,7 +477,7 @@ func _[T any](x T) {
}
}
-func _[T interface{type int}](x T) {
+func _[T interface{~int}](x T) {
_ = x /* ERROR not an interface */ .(int)
switch x /* ERROR not an interface */ .(type) {
}
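
The rewritten typeparams tests above replace the old "type ..." list syntax with tilde-based union elements, and they spell out when indexing, len, cap, and range are permitted on a type parameter: only when every type in the constraint's type set supports the operation consistently. A minimal standalone sketch of that rule, not part of this change (byteseq, first, and length are illustrative names, assuming Go 1.18+ syntax):

package main

import "fmt"

// byteseq's type set holds every type whose underlying type is string or
// []byte, so both indexing and len are valid for any T that satisfies it.
type byteseq interface{ ~string | ~[]byte }

// first reads the first element; the element type is byte for both string
// and []byte, so the result type is well defined.
func first[T byteseq](x T) byte { return x[0] }

// length works because len is valid for every type in the type set.
func length[T byteseq](x T) int { return len(x) }

func main() {
	fmt.Println(first("go"), length([]byte("gopher")))
}
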
diff --git a/src/cmd/compile/internal/types2/testdata/check/unions.go2 b/src/cmd/compile/internal/types2/testdata/check/unions.go2
new file mode 100644
index 0000000000..bcd7de6644
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/check/unions.go2
@@ -0,0 +1,66 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Check that overlong unions don't bog down type checking.
+// Disallow them for now.
+
+package p
+
+type t int
+
+type (
+ t00 t; t01 t; t02 t; t03 t; t04 t; t05 t; t06 t; t07 t; t08 t; t09 t
+ t10 t; t11 t; t12 t; t13 t; t14 t; t15 t; t16 t; t17 t; t18 t; t19 t
+ t20 t; t21 t; t22 t; t23 t; t24 t; t25 t; t26 t; t27 t; t28 t; t29 t
+ t30 t; t31 t; t32 t; t33 t; t34 t; t35 t; t36 t; t37 t; t38 t; t39 t
+ t40 t; t41 t; t42 t; t43 t; t44 t; t45 t; t46 t; t47 t; t48 t; t49 t
+ t50 t; t51 t; t52 t; t53 t; t54 t; t55 t; t56 t; t57 t; t58 t; t59 t
+ t60 t; t61 t; t62 t; t63 t; t64 t; t65 t; t66 t; t67 t; t68 t; t69 t
+ t70 t; t71 t; t72 t; t73 t; t74 t; t75 t; t76 t; t77 t; t78 t; t79 t
+ t80 t; t81 t; t82 t; t83 t; t84 t; t85 t; t86 t; t87 t; t88 t; t89 t
+ t90 t; t91 t; t92 t; t93 t; t94 t; t95 t; t96 t; t97 t; t98 t; t99 t
+)
+
+type u99 interface {
+ t00|t01|t02|t03|t04|t05|t06|t07|t08|t09|
+ t10|t11|t12|t13|t14|t15|t16|t17|t18|t19|
+ t20|t21|t22|t23|t24|t25|t26|t27|t28|t29|
+ t30|t31|t32|t33|t34|t35|t36|t37|t38|t39|
+ t40|t41|t42|t43|t44|t45|t46|t47|t48|t49|
+ t50|t51|t52|t53|t54|t55|t56|t57|t58|t59|
+ t60|t61|t62|t63|t64|t65|t66|t67|t68|t69|
+ t70|t71|t72|t73|t74|t75|t76|t77|t78|t79|
+ t80|t81|t82|t83|t84|t85|t86|t87|t88|t89|
+ t90|t91|t92|t93|t94|t95|t96|t97|t98
+}
+
+type u100a interface {
+ u99|float32
+}
+
+type u100b interface {
+ u99|float64
+}
+
+type u101 interface {
+ t00|t01|t02|t03|t04|t05|t06|t07|t08|t09|
+ t10|t11|t12|t13|t14|t15|t16|t17|t18|t19|
+ t20|t21|t22|t23|t24|t25|t26|t27|t28|t29|
+ t30|t31|t32|t33|t34|t35|t36|t37|t38|t39|
+ t40|t41|t42|t43|t44|t45|t46|t47|t48|t49|
+ t50|t51|t52|t53|t54|t55|t56|t57|t58|t59|
+ t60|t61|t62|t63|t64|t65|t66|t67|t68|t69|
+ t70|t71|t72|t73|t74|t75|t76|t77|t78|t79|
+ t80|t81|t82|t83|t84|t85|t86|t87|t88|t89|
+ t90|t91|t92|t93|t94|t95|t96|t97|t98|t99|
+ int // ERROR cannot handle more than 100 union terms
+}
+
+type u102 interface {
+ int /* ERROR cannot handle more than 100 union terms */ |string|u100a
+}
+
+type u200 interface {
+ u100a /* ERROR cannot handle more than 100 union terms */ |u100b
+}
diff --git a/src/cmd/compile/internal/types2/testdata/examples/constraints.go2 b/src/cmd/compile/internal/types2/testdata/examples/constraints.go2
new file mode 100644
index 0000000000..f40d18c63e
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/examples/constraints.go2
@@ -0,0 +1,91 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file shows some examples of generic constraint interfaces.
+
+package p
+
+type (
+ // Type lists are processed as unions but an error is reported.
+ // TODO(gri) remove this once the parser doesn't accept type lists anymore.
+ _ interface{
+ type /* ERROR use generalized embedding syntax instead of a type list */ int
+ }
+ _ interface{
+ type /* ERROR use generalized embedding syntax instead of a type list */ int
+ type float32
+ }
+)
+
+type MyInt int
+
+type (
+ // Arbitrary types may be embedded like interfaces.
+ _ interface{int}
+ _ interface{~int}
+
+ // Types may be combined into a union.
+ union interface{int|~string}
+
+ // Union terms must describe disjoint (non-overlapping) type sets.
+ _ interface{int|int /* ERROR overlapping terms int */ }
+ _ interface{int|~ /* ERROR overlapping terms ~int */ int }
+ _ interface{~int|~ /* ERROR overlapping terms ~int */ int }
+ _ interface{~int|MyInt /* ERROR overlapping terms p.MyInt and ~int */ }
+ _ interface{int|interface{}}
+ _ interface{int|~string|union}
+ _ interface{int|~string|interface{int}}
+ _ interface{union|union /* ERROR overlapping terms p.union and p.union */ }
+
+ // For now we do not permit interfaces with methods in unions.
+ _ interface{~ /* ERROR invalid use of ~ */ interface{}}
+ _ interface{int|interface /* ERROR cannot use .* in union */ { m() }}
+)
+
+type (
+ // Tilde is not permitted on defined types or interfaces.
+ foo int
+ bar interface{}
+ _ interface{foo}
+ _ interface{~ /* ERROR invalid use of ~ */ foo }
+ _ interface{~ /* ERROR invalid use of ~ */ bar }
+)
+
+// Stand-alone type parameters are not permitted as elements or terms in unions.
+type (
+ _[T interface{ *T } ] struct{} // ok
+ _[T interface{ int | *T } ] struct{} // ok
+ _[T interface{ T /* ERROR cannot embed a type parameter */ } ] struct{}
+ _[T interface{ ~T /* ERROR cannot embed a type parameter */ } ] struct{}
+ _[T interface{ int|T /* ERROR cannot embed a type parameter */ }] struct{}
+)
+
+// Multiple embedded union elements are intersected. The order in which they
+// appear in the interface doesn't matter since intersection is a symmetric
+// operation.
+
+type myInt1 int
+type myInt2 int
+
+func _[T interface{ myInt1|myInt2; ~int }]() T { return T(0) }
+func _[T interface{ ~int; myInt1|myInt2 }]() T { return T(0) }
+
+// Here the intersections are empty - there's no type that's in the type set of T.
+func _[T interface{ myInt1|myInt2; int }]() T { return T(0 /* ERROR cannot convert */ ) }
+func _[T interface{ int; myInt1|myInt2 }]() T { return T(0 /* ERROR cannot convert */ ) }
+
+// Union elements may be interfaces as long as they don't define
+// any methods or embed comparable.
+
+type (
+ Integer interface{ ~int|~int8|~int16|~int32|~int64 }
+ Unsigned interface{ ~uint|~uint8|~uint16|~uint32|~uint64 }
+ Floats interface{ ~float32|~float64 }
+ Complex interface{ ~complex64|~complex128 }
+ Number interface{ Integer|Unsigned|Floats|Complex }
+ Ordered interface{ Integer|Unsigned|Floats|~string }
+
+ _ interface{ Number | error /* ERROR cannot use error in union */ }
+ _ interface{ Ordered | comparable /* ERROR cannot use comparable in union */ }
+)
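
The constraints file above builds numeric constraints (Integer, Unsigned, Floats, Complex, Number, Ordered) out of tilde terms and unions of method-free interfaces. A hedged sketch of how such a constraint is typically consumed, using illustrative names (integer, floats, number, sum) rather than the ones declared in the test file:

package main

import "fmt"

// These mirror the shape of the constraints in the test file: unions of
// tilde terms, combined by embedding method-free interfaces.
type integer interface{ ~int | ~int8 | ~int16 | ~int32 | ~int64 }
type floats interface{ ~float32 | ~float64 }
type number interface{ integer | floats }

// sum works for any type in number's type set because + is defined for
// all of them.
func sum[T number](xs ...T) T {
	var s T
	for _, x := range xs {
		s += x
	}
	return s
}

func main() {
	fmt.Println(sum(1, 2, 3), sum(1.5, 2.5))
}
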
diff --git a/src/cmd/compile/internal/types2/testdata/examples/functions.go2 b/src/cmd/compile/internal/types2/testdata/examples/functions.go2
index 0c2a408f02..ef8953cb43 100644
--- a/src/cmd/compile/internal/types2/testdata/examples/functions.go2
+++ b/src/cmd/compile/internal/types2/testdata/examples/functions.go2
@@ -66,7 +66,7 @@ var _ float64 = foo(42, []float64{1.0}, &s)
// Type inference works in a straight-forward manner even
// for variadic functions.
-func variadic[A, B any](A, B, ...B) int
+func variadic[A, B any](A, B, ...B) int { panic(0) }
// var _ = variadic(1) // ERROR not enough arguments
var _ = variadic(1, 2.3)
@@ -98,7 +98,7 @@ func g2b[P, Q any](x P, y Q) {
// Here's an example of a recursive function call with variadic
// arguments and type inference inferring the type parameter of
// the caller (i.e., itself).
-func max[T interface{ type int }](x ...T) T {
+func max[T interface{ ~int }](x ...T) T {
var x0 T
if len(x) > 0 {
x0 = x[0]
@@ -118,9 +118,9 @@ func max[T interface{ type int }](x ...T) T {
// Thus even if a type can be inferred successfully, the function
// call may not be valid.
-func fboth[T any](chan T)
-func frecv[T any](<-chan T)
-func fsend[T any](chan<- T)
+func fboth[T any](chan T) {}
+func frecv[T any](<-chan T) {}
+func fsend[T any](chan<- T) {}
func _() {
var both chan int
@@ -140,9 +140,9 @@ func _() {
fsend(send)
}
-func ffboth[T any](func(chan T))
-func ffrecv[T any](func(<-chan T))
-func ffsend[T any](func(chan<- T))
+func ffboth[T any](func(chan T)) {}
+func ffrecv[T any](func(<-chan T)) {}
+func ffsend[T any](func(chan<- T)) {}
func _() {
var both func(chan int)
@@ -169,9 +169,9 @@ func _() {
// assignment is permitted, parameter passing is permitted as well,
// so type inference should be able to handle these cases well.
-func g1[T any]([]T)
-func g2[T any]([]T, T)
-func g3[T any](*T, ...T)
+func g1[T any]([]T) {}
+func g2[T any]([]T, T) {}
+func g3[T any](*T, ...T) {}
func _() {
type intSlize []int
@@ -195,7 +195,7 @@ func _() {
// Here's a realistic example.
-func append[T any](s []T, t ...T) []T
+func append[T any](s []T, t ...T) []T { panic(0) }
func _() {
var f func()
@@ -208,8 +208,12 @@ func _() {
// (that would indicate a slice type). Thus, generic functions cannot
// have empty type parameter lists, either. This is a syntax error.
-func h[] /* ERROR empty type parameter list */ ()
+func h[] /* ERROR empty type parameter list */ () {}
func _() {
h[] /* ERROR operand */ ()
}
+
+// Parameterized functions must have a function body.
+
+func _ /* ERROR missing function body */ [P any]()
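
The functions.go2 changes above give every generic function a body (a parameterized function without one is now an error) and switch max's constraint to ~int. A small self-contained variant showing the same shape plus a call that relies on type inference; the names are illustrative, not taken from the change:

package main

import "fmt"

// maxOf mirrors the variadic max function in the test file: the constraint
// ~int guarantees that > is defined for T.
func maxOf[T interface{ ~int }](x ...T) T {
	var m T
	for i, v := range x {
		if i == 0 || v > m {
			m = v
		}
	}
	return m
}

func main() {
	// T is inferred as int from the arguments.
	fmt.Println(maxOf(3, 1, 4, 1, 5))
}
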
diff --git a/src/cmd/compile/internal/types2/testdata/examples/inference.go2 b/src/cmd/compile/internal/types2/testdata/examples/inference.go2
index b47ce75805..e169aec746 100644
--- a/src/cmd/compile/internal/types2/testdata/examples/inference.go2
+++ b/src/cmd/compile/internal/types2/testdata/examples/inference.go2
@@ -7,10 +7,10 @@
package p
type Ordered interface {
- type int, float64, string
+ ~int|~float64|~string
}
-func min[T Ordered](x, y T) T
+func min[T Ordered](x, y T) T { panic(0) }
func _() {
// min can be called with explicit instantiation.
@@ -37,7 +37,7 @@ func _() {
_ = min("foo", "bar")
}
-func mixed[T1, T2, T3 any](T1, T2, T3)
+func mixed[T1, T2, T3 any](T1, T2, T3) {}
func _() {
// mixed can be called with explicit instantiation.
@@ -54,7 +54,7 @@ func _() {
mixed[int, string](1.1 /* ERROR cannot use 1.1 */ , "", false)
}
-func related1[Slice interface{type []Elem}, Elem any](s Slice, e Elem)
+func related1[Slice interface{~[]Elem}, Elem any](s Slice, e Elem) {}
func _() {
// related1 can be called with explicit instantiation.
@@ -78,7 +78,7 @@ func _() {
related1(si, "foo" /* ERROR cannot use "foo" */ )
}
-func related2[Elem any, Slice interface{type []Elem}](e Elem, s Slice)
+func related2[Elem any, Slice interface{~[]Elem}](e Elem, s Slice) {}
func _() {
// related2 can be called with explicit instantiation.
diff --git a/src/cmd/compile/internal/types2/testdata/examples/methods.go2 b/src/cmd/compile/internal/types2/testdata/examples/methods.go2
index 76c6539e1b..4e87041e54 100644
--- a/src/cmd/compile/internal/types2/testdata/examples/methods.go2
+++ b/src/cmd/compile/internal/types2/testdata/examples/methods.go2
@@ -6,6 +6,8 @@
package p
+import "unsafe"
+
// Parameterized types may have methods.
type T1[A any] struct{ a A }
@@ -94,3 +96,18 @@ func (_ T2[_, _, _]) _() int { return 42 }
type T0 struct{}
func (T0) _() {}
func (T1[A]) _() {}
+
+// A generic receiver type may constrain its type parameter such
+// that it must be a pointer type. Such receiver types are not
+// permitted.
+type T3a[P interface{ ~int | ~string | ~float64 }] P
+
+func (T3a[_]) m() {} // this is ok
+
+type T3b[P interface{ ~unsafe.Pointer }] P
+
+func (T3b /* ERROR invalid receiver */ [_]) m() {}
+
+type T3c[P interface{ *int | *string }] P
+
+func (T3c /* ERROR invalid receiver */ [_]) m() {}
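
The new cases in methods.go2 check that a receiver type may not constrain its type parameter to pointer types only. For contrast, a plain parameterized type with a value method, in the spirit of T1 in that file (Box and Get are illustrative names, not from the change):

package main

import "fmt"

// Box is a parameterized type; its method uses an ordinary value receiver,
// which is always permitted.
type Box[A any] struct{ a A }

func (b Box[A]) Get() A { return b.a }

func main() {
	fmt.Println(Box[string]{a: "hello"}.Get())
}
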
diff --git a/src/cmd/compile/internal/types2/testdata/examples/operations.go2 b/src/cmd/compile/internal/types2/testdata/examples/operations.go2
new file mode 100644
index 0000000000..18e4d6080c
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/examples/operations.go2
@@ -0,0 +1,29 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package p
+
+// indirection
+
+func _[P any](p P) {
+ _ = *p // ERROR cannot indirect p
+}
+
+func _[P interface{ int }](p P) {
+ _ = *p // ERROR cannot indirect p
+}
+
+func _[P interface{ *int }](p P) {
+ _ = *p
+}
+
+func _[P interface{ *int | *string }](p P) {
+ _ = *p // ERROR must have identical base types
+}
+
+type intPtr *int
+
+func _[P interface{ *int | intPtr } ](p P) {
+ var _ int = *p
+}
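
The last case in operations.go2 is the interesting one: indirection through a type parameter is allowed when all types in the type set share the same base type. A runnable sketch of exactly that case (deref is an illustrative name):

package main

import "fmt"

type intPtr *int

// Both *int and intPtr have base type int, so *p has a well-defined type.
func deref[P interface{ *int | intPtr }](p P) int { return *p }

func main() {
	x := 42
	fmt.Println(deref(&x), deref(intPtr(&x)))
}
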
diff --git a/src/cmd/compile/internal/types2/testdata/examples/types.go2 b/src/cmd/compile/internal/types2/testdata/examples/types.go2
index a7825ed2d9..9ee014452c 100644
--- a/src/cmd/compile/internal/types2/testdata/examples/types.go2
+++ b/src/cmd/compile/internal/types2/testdata/examples/types.go2
@@ -155,30 +155,40 @@ type _ struct {
List /* ERROR List redeclared */ [int]
}
+// Issue #45639: We don't allow this anymore. Keep this code
+// in case we decide to revisit this decision.
+//
// It's possible to declare local types whose underlying types
// are type parameters. As with ordinary type definitions, the
// types underlying properties are "inherited" but the methods
// are not.
-func _[T interface{ m(); type int }]() {
- type L T
- var x L
-
- // m is not defined on L (it is not "inherited" from
- // its underlying type).
- x.m /* ERROR x.m undefined */ ()
-
- // But the properties of T, such that as that it supports
- // the operations of the types given by its type bound,
- // are also the properties of L.
- x++
- _ = x - x
-
- // On the other hand, if we define a local alias for T,
- // that alias stands for T as expected.
- type A = T
- var y A
- y.m()
- _ = y < 0
+// func _[T interface{ m(); ~int }]() {
+// type L T
+// var x L
+//
+// // m is not defined on L (it is not "inherited" from
+// // its underlying type).
+// x.m /* ERROR x.m undefined */ ()
+//
+// // But the properties of T, such as that it supports
+// // the operations of the types given by its type bound,
+// // are also the properties of L.
+// x++
+// _ = x - x
+//
+// // On the other hand, if we define a local alias for T,
+// // that alias stands for T as expected.
+// type A = T
+// var y A
+// y.m()
+// _ = y < 0
+// }
+
+// It is not permitted to declare a local type whose underlying
+// type is a type parameter not declared by that type declaration.
+func _[T any]() {
+ type _ T // ERROR cannot use function type parameter T as RHS in type declaration
+ type _ [_ any] T // ERROR cannot use function type parameter T as RHS in type declaration
}
// As a special case, an explicit type argument may be omitted
@@ -206,15 +216,15 @@ type B0 interface {}
type B1[_ any] interface{}
type B2[_, _ any] interface{}
-func _[T1 B0]()
-func _[T1 B1[T1]]()
-func _[T1 B2 /* ERROR cannot use generic type .* without instantiation */ ]()
+func _[T1 B0]() {}
+func _[T1 B1[T1]]() {}
+func _[T1 B2 /* ERROR cannot use generic type .* without instantiation */ ]() {}
-func _[T1, T2 B0]()
-func _[T1 B1[T1], T2 B1[T2]]()
-func _[T1, T2 B2 /* ERROR cannot use generic type .* without instantiation */ ]()
+func _[T1, T2 B0]() {}
+func _[T1 B1[T1], T2 B1[T2]]() {}
+func _[T1, T2 B2 /* ERROR cannot use generic type .* without instantiation */ ]() {}
-func _[T1 B0, T2 B1[T2]]() // here B1 applies to T2
+func _[T1 B0, T2 B1[T2]]() {} // here B1 applies to T2
// When the type argument is left away, the type bound is
// instantiated for each type parameter with that type
@@ -232,11 +242,11 @@ func _[A Adder[A], B Adder[B], C Adder[A]]() {
// The type of variables (incl. parameters and return values) cannot
// be an interface with type constraints or be/embed comparable.
type I interface {
- type int
+ ~int
}
var (
- _ interface /* ERROR contains type constraints */ {type int}
+ _ interface /* ERROR contains type constraints */ {~int}
_ I /* ERROR contains type constraints */
)
@@ -267,7 +277,7 @@ func _() {
// (If a type list contains just a single const type, we could
// allow it, but such type lists don't make much sense in the
// first place.)
-func _[T interface { type int, float64 }]() {
+func _[T interface{~int|~float64}]() {
// not valid
const _ = T /* ERROR not constant */ (0)
const _ T /* ERROR invalid constant type T */ = 1
@@ -277,3 +287,19 @@ func _[T interface { type int, float64 }]() {
var _ T = 1
_ = T(0)
}
+
+// It is possible to create composite literals of type parameter
+// type as long as it's possible to create a composite literal
+// of the structural type of the type parameter's constraint.
+func _[P interface{ ~[]int }]() P {
+ return P{}
+ return P{1, 2, 3}
+}
+
+func _[P interface{ ~[]E }, E interface{ map[string]P } ]() P {
+ x := P{}
+ return P{{}}
+ return P{E{}}
+ return P{E{"foo": x}}
+ return P{{"foo": x}, {}}
+}
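
The new cases at the end of types.go2 show composite literals of type parameter type; they work whenever the constraint's structural type supports a composite literal. A standalone sketch of the simplest case (ints and triple are illustrative names):

package main

import "fmt"

type ints []int

// P's structural (core) type is []int, so a slice literal of type P is valid.
func triple[P interface{ ~[]int }]() P { return P{1, 2, 3} }

func main() {
	fmt.Println(triple[ints]()) // [1 2 3]
}
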
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39634.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39634.go2
index 2c1299feb0..8d14f8acaf 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39634.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39634.go2
@@ -31,13 +31,14 @@ type x7[A any] struct{ foo7 }
func main7() { var _ foo7 = x7[int]{} }
// crash 8
-type foo8[A any] interface { type A }
-func bar8[A foo8[A]](a A) {}
-func main8() {}
+// Embedding stand-alone type parameters is not permitted for now. Disabled.
+// type foo8[A any] interface { ~A }
+// func bar8[A foo8[A]](a A) {}
+// func main8() {}
// crash 9
-type foo9[A any] interface { type foo9 /* ERROR interface contains type constraints */ [A] }
-func _() { var _ = new(foo9 /* ERROR interface contains type constraints */ [int]) }
+type foo9[A any] interface { foo9 /* ERROR illegal cycle */ [A] }
+func _() { var _ = new(foo9 /* ERROR illegal cycle */ [int]) }
// crash 12
var u /* ERROR cycle */ , i [func /* ERROR used as value */ /* ERROR used as value */ (u, c /* ERROR undeclared */ /* ERROR undeclared */ ) {}(0, len /* ERROR must be called */ /* ERROR must be called */ )]c /* ERROR undeclared */ /* ERROR undeclared */
@@ -49,7 +50,7 @@ func (G15 /* ERROR generic type .* without instantiation */ ) p()
// crash 16
type Foo16[T any] r16 /* ERROR not a type */
-func r16[T any]() Foo16[Foo16[T]]
+func r16[T any]() Foo16[Foo16[T]] { panic(0) }
// crash 17
type Y17 interface{ c() }
@@ -57,7 +58,7 @@ type Z17 interface {
c() Y17
Y17 /* ERROR duplicate method */
}
-func F17[T Z17](T)
+func F17[T Z17](T) {}
// crash 18
type o18[T any] []func(_ o18[[]_ /* ERROR cannot use _ */ ])
@@ -87,5 +88,5 @@ type T26 = interface{ F26[ /* ERROR cannot have type parameters */ Z any]() }
func F26[Z any]() T26 { return F26 /* ERROR without instantiation */ /* ERROR missing method */ [] /* ERROR operand */ }
// crash 27
-func e27[T any]() interface{ x27 /* ERROR not a type */ }
+func e27[T any]() interface{ x27 /* ERROR not a type */ } { panic(0) }
func x27() { e27( /* ERROR cannot infer T */ ) } \ No newline at end of file
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39680.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39680.go2
index 9bc26f3546..e56bc35475 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39680.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39680.go2
@@ -4,16 +4,19 @@
package p
+// Embedding stand-alone type parameters is not permitted for now. Disabled.
+
+/*
import "fmt"
// Minimal test case.
-func _[T interface{type T}](x T) T{
+func _[T interface{~T}](x T) T{
return x
}
// Test case from issue.
type constr[T any] interface {
- type T
+ ~T
}
func Print[T constr[T]](s []T) {
@@ -25,3 +28,4 @@ func Print[T constr[T]](s []T) {
func f() {
Print([]string{"Hello, ", "playground\n"})
}
+*/
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39693.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39693.go2
index 316ab1982e..301c13be41 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39693.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39693.go2
@@ -4,11 +4,20 @@
package p
-type Number interface {
- int /* ERROR int is not an interface */
- float64 /* ERROR float64 is not an interface */
+type Number1 interface {
+ // embedding non-interface types is permitted
+ int
+ float64
}
-func Add[T Number](a, b T) T {
+func Add1[T Number1](a, b T) T {
return a /* ERROR not defined */ + b
}
+
+type Number2 interface {
+ int|float64
+}
+
+func Add2[T Number2](a, b T) T {
+ return a + b
+}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39699.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39699.go2
index 75491e7e26..72f83997c2 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39699.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39699.go2
@@ -8,7 +8,7 @@ type T0 interface{
}
type T1 interface{
- type int
+ ~int
}
type T2 interface{
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39711.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39711.go2
index df621a4c17..85eb0a78fe 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39711.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39711.go2
@@ -7,5 +7,7 @@ package p
// Do not report a duplicate type error for this type list.
// (Check types after interfaces have been completed.)
type _ interface {
- type interface{ Error() string }, interface{ String() string }
+ // TODO(gri) Once we have full type sets we can enable this again.
+ // For now we don't permit interfaces in type lists.
+ // type interface{ Error() string }, interface{ String() string }
}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39723.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39723.go2
index 55464e6b77..d5311ed3e7 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39723.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39723.go2
@@ -6,4 +6,4 @@ package p
// A constraint must be an interface; it cannot
// be a type parameter, for instance.
-func _[A interface{ type interface{} }, B A /* ERROR not an interface */ ]()
+func _[A interface{ ~int }, B A /* ERROR not an interface */ ]() {}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39725.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39725.go2
index e19b6770bf..62dc45a596 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39725.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39725.go2
@@ -4,13 +4,13 @@
package p
-func f1[T1, T2 any](T1, T2, struct{a T1; b T2})
+func f1[T1, T2 any](T1, T2, struct{a T1; b T2}) {}
func _() {
f1(42, string("foo"), struct /* ERROR does not match inferred type struct\{a int; b string\} */ {a, b int}{})
}
// simplified test case from issue
-func f2[T any](_ []T, _ func(T))
+func f2[T any](_ []T, _ func(T)) {}
func _() {
f2([]string{}, func /* ERROR does not match inferred type func\(string\) */ (f []byte) {})
}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39755.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39755.go2
index b7ab68818e..257b73a2fb 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39755.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39755.go2
@@ -4,14 +4,14 @@
package p
-func _[T interface{type map[string]int}](x T) {
+func _[T interface{~map[string]int}](x T) {
_ = x == nil
}
// simplified test case from issue
type PathParamsConstraint interface {
- type map[string]string, []struct{key, value string}
+ ~map[string]string | ~[]struct{key, value string}
}
type PathParams[T PathParamsConstraint] struct {
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39938.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39938.go2
index 76e7e369ca..0da6e103fd 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39938.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39938.go2
@@ -8,8 +8,8 @@ package p
type E0[P any] P
type E1[P any] *P
-type E2[P any] struct{ P }
-type E3[P any] struct{ *P }
+type E2[P any] struct{ _ P }
+type E3[P any] struct{ _ *P }
type T0 /* ERROR illegal cycle */ struct {
_ E0[T0]
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39948.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39948.go2
index c2b460902c..e38e57268d 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39948.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39948.go2
@@ -5,5 +5,5 @@
package p
type T[P any] interface{
- P // ERROR P is a type parameter, not an interface
+ P // ERROR cannot embed a type parameter
}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39976.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39976.go2
index 3db4eae012..d703da90a2 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39976.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue39976.go2
@@ -7,7 +7,7 @@ package p
type policy[K, V any] interface{}
type LRU[K, V any] struct{}
-func NewCache[K, V any](p policy[K, V])
+func NewCache[K, V any](p policy[K, V]) {}
func _() {
var lru LRU[int, string]
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40038.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40038.go2
index 8948d61caa..0981a335da 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40038.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40038.go2
@@ -8,7 +8,7 @@ type A[T any] int
func (A[T]) m(A[T])
-func f[P interface{m(P)}]()
+func f[P interface{m(P)}]() {}
func _() {
_ = f[A[int]]
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40056.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40056.go2
index 747aab49dd..a3f3eecca0 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40056.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40056.go2
@@ -10,6 +10,6 @@ func _() {
type S struct {}
-func NewS[T any]() *S
+func NewS[T any]() *S { panic(0) }
func (_ *S /* ERROR S is not a generic type */ [T]) M()
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40301.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40301.go2
index 5d97855f8a..c78f9a1fa0 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40301.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40301.go2
@@ -7,6 +7,6 @@ package p
import "unsafe"
func _[T any](x T) {
- _ = unsafe /* ERROR undefined */ .Alignof(x)
- _ = unsafe /* ERROR undefined */ .Sizeof(x)
+ _ = unsafe.Alignof(x)
+ _ = unsafe.Sizeof(x)
}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40684.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40684.go2
index 0269c3a62c..58d0f69f65 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40684.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40684.go2
@@ -6,8 +6,8 @@ package p
type T[_ any] int
-func f[_ any]()
-func g[_, _ any]()
+func f[_ any]() {}
+func g[_, _ any]() {}
func _() {
_ = f[T /* ERROR without instantiation */ ]
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40789.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40789.go2
new file mode 100644
index 0000000000..9eea4ad60a
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue40789.go2
@@ -0,0 +1,37 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import "fmt"
+
+func main() {
+ m := map[string]int{
+ "a": 6,
+ "b": 7,
+ }
+ fmt.Println(copyMap[map[string]int, string, int](m))
+}
+
+type Map[K comparable, V any] interface {
+ map[K] V
+}
+
+func copyMap[M Map[K, V], K comparable, V any](m M) M {
+ m1 := make(M)
+ for k, v := range m {
+ m1[k] = v
+ }
+ return m1
+}
+
+// simpler test case from the same issue
+
+type A[X comparable] interface {
+ []X
+}
+
+func f[B A[X], X comparable]() B {
+ return nil
+}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue41124.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue41124.go2
index 61f766bcbd..4642ab60fc 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue41124.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue41124.go2
@@ -7,7 +7,7 @@ package p
// Test case from issue.
type Nat interface {
- type Zero, Succ
+ Zero|Succ
}
type Zero struct{}
@@ -22,7 +22,7 @@ type I1 interface {
}
type I2 interface {
- type int
+ ~int
}
type I3 interface {
@@ -47,7 +47,7 @@ type _ struct{
}
type _ struct{
- I3 // ERROR interface contains type constraints
+ I3 // ERROR interface is .* comparable
}
// General composite types.
@@ -59,19 +59,19 @@ type (
_ []I1 // ERROR interface is .* comparable
_ []I2 // ERROR interface contains type constraints
- _ *I3 // ERROR interface contains type constraints
+ _ *I3 // ERROR interface is .* comparable
_ map[I1 /* ERROR interface is .* comparable */ ]I2 // ERROR interface contains type constraints
- _ chan I3 // ERROR interface contains type constraints
+ _ chan I3 // ERROR interface is .* comparable
_ func(I1 /* ERROR interface is .* comparable */ )
_ func() I2 // ERROR interface contains type constraints
)
// Other cases.
-var _ = [...]I3 /* ERROR interface contains type constraints */ {}
+var _ = [...]I3 /* ERROR interface is .* comparable */ {}
func _(x interface{}) {
- _ = x.(I3 /* ERROR interface contains type constraints */ )
+ _ = x.(I3 /* ERROR interface is .* comparable */ )
}
type T1[_ any] struct{}
@@ -79,9 +79,9 @@ type T3[_, _, _ any] struct{}
var _ T1[I2 /* ERROR interface contains type constraints */ ]
var _ T3[int, I2 /* ERROR interface contains type constraints */ , float32]
-func f1[_ any]() int
+func f1[_ any]() int { panic(0) }
var _ = f1[I2 /* ERROR interface contains type constraints */ ]()
-func f3[_, _, _ any]() int
+func f3[_, _, _ any]() int { panic(0) }
var _ = f3[int, I2 /* ERROR interface contains type constraints */ , float32]()
func _(x interface{}) {
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue42758.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue42758.go2
index 698cb8a16b..bf0031f5d2 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue42758.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue42758.go2
@@ -17,7 +17,7 @@ func _[T any](x interface{}){
}
type constraint interface {
- type int
+ ~int
}
func _[T constraint](x interface{}){
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue43671.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue43671.go2
new file mode 100644
index 0000000000..6cc3801cc9
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue43671.go2
@@ -0,0 +1,58 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package p
+
+type C0 interface{ int }
+type C1 interface{ chan int }
+type C2 interface{ chan int | <-chan int }
+type C3 interface{ chan int | chan float32 }
+type C4 interface{ chan int | chan<- int }
+type C5[T any] interface{ ~chan T | <-chan T }
+
+func _[T any](ch T) {
+ <-ch // ERROR cannot receive from non-channel
+}
+
+func _[T C0](ch T) {
+ <-ch // ERROR cannot receive from non-channel
+}
+
+func _[T C1](ch T) {
+ <-ch
+}
+
+func _[T C2](ch T) {
+ <-ch
+}
+
+func _[T C3](ch T) {
+ <-ch // ERROR channels of ch .* must have the same element type
+}
+
+func _[T C4](ch T) {
+ <-ch // ERROR cannot receive from send-only channel
+}
+
+func _[T C5[X], X any](ch T, x X) {
+ x = <-ch
+}
+
+// test case from issue, slightly modified
+type RecvChan[T any] interface {
+ ~chan T | ~<-chan T
+}
+
+func _[T any, C RecvChan[T]](ch C) T {
+ return <-ch
+}
+
+func f[T any, C interface{ chan T }](ch C) T {
+ return <-ch
+}
+
+func _(ch chan int) {
+ var x int = f(ch) // test constraint type inference for this case
+ _ = x
+}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45548.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45548.go2
index b1e42497e8..b8ba0ad4a7 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45548.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45548.go2
@@ -4,7 +4,7 @@
package p
-func f[F interface{type *Q}, G interface{type *R}, Q, R any](q Q, r R) {}
+func f[F interface{~*Q}, G interface{~*R}, Q, R any](q Q, r R) {}
func _() {
f[*float64, *int](1, 2)
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45635.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45635.go2
index 65662cdc76..2937959105 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45635.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45635.go2
@@ -13,7 +13,7 @@ type N[T any] struct{}
var _ N[] /* ERROR expecting type */
type I interface {
- type map[int]int, []int
+ ~[]int
}
func _[T I](i, j int) {
@@ -27,6 +27,5 @@ func _[T I](i, j int) {
_ = s[i, j /* ERROR more than one index */ ]
var t T
- // TODO(gri) fix multiple error below
- _ = t[i, j /* ERROR more than one index */ /* ERROR more than one index */ ]
+ _ = t[i, j /* ERROR more than one index */ ]
}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45639.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45639.go2
new file mode 100644
index 0000000000..441fb4cb34
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45639.go2
@@ -0,0 +1,12 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package P
+
+// It is not permitted to declare a local type whose underlying
+// type is a type parameter not declared by that type declaration.
+func _[T any]() {
+ type _ T // ERROR cannot use function type parameter T as RHS in type declaration
+ type _ [_ any] T // ERROR cannot use function type parameter T as RHS in type declaration
+}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45985.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45985.go2
index 7678e348ef..f25b9d2b26 100644
--- a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45985.go2
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue45985.go2
@@ -5,7 +5,7 @@
package issue45985
// TODO(gri): this error should be on app[int] below.
-func app[S /* ERROR "type S = S does not match" */ interface{ type []T }, T any](s S, e T) S {
+func app[S /* ERROR "type S = S does not match" */ interface{ ~[]T }, T any](s S, e T) S {
return append(s, e)
}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue46090.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue46090.go2
new file mode 100644
index 0000000000..81b31974c8
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue46090.go2
@@ -0,0 +1,9 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// The predeclared type comparable is not visible before Go 1.18.
+
+package go1_17
+
+type _ comparable // ERROR undeclared
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue46275.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue46275.go2
new file mode 100644
index 0000000000..f41ae26e4b
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue46275.go2
@@ -0,0 +1,26 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package issue46275
+
+type N[T any] struct {
+ *N[T]
+ t T
+}
+
+func (n *N[T]) Elem() T {
+ return n.t
+}
+
+type I interface {
+ Elem() string
+}
+
+func _() {
+ var n1 *N[string]
+ var _ I = n1
+ type NS N[string]
+ var n2 *NS
+ var _ I = n2
+}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue46583.src b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue46583.src
new file mode 100644
index 0000000000..da1f1ffbba
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue46583.src
@@ -0,0 +1,28 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package p
+
+type T1 struct{}
+func (t T1) m(int) {}
+var f1 func(T1)
+
+type T2 struct{}
+func (t T2) m(x int) {}
+var f2 func(T2)
+
+type T3 struct{}
+func (T3) m(int) {}
+var f3 func(T3)
+
+type T4 struct{}
+func (T4) m(x int) {}
+var f4 func(T4)
+
+func _() {
+ f1 = T1 /* ERROR func\(T1, int\) */ .m
+ f2 = T2 /* ERROR func\(t T2, x int\) */ .m
+ f3 = T3 /* ERROR func\(T3, int\) */ .m
+ f4 = T4 /* ERROR func\(_ T4, x int\) */ .m
+}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47031.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47031.go2
new file mode 100644
index 0000000000..b184f9b5b7
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47031.go2
@@ -0,0 +1,20 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package p
+
+type Mer interface { M() }
+
+func F[T Mer](p *T) {
+ p.M /* ERROR p\.M undefined */ ()
+}
+
+type MyMer int
+
+func (MyMer) M() {}
+
+func _() {
+ F(new(MyMer))
+ F[Mer](nil)
+}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47115.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47115.go2
new file mode 100644
index 0000000000..00828eb997
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47115.go2
@@ -0,0 +1,40 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package p
+
+type C0 interface{ int }
+type C1 interface{ chan int }
+type C2 interface{ chan int | <-chan int }
+type C3 interface{ chan int | chan float32 }
+type C4 interface{ chan int | chan<- int }
+type C5[T any] interface{ ~chan T | chan<- T }
+
+func _[T any](ch T) {
+ ch /* ERROR cannot send to non-channel */ <- 0
+}
+
+func _[T C0](ch T) {
+ ch /* ERROR cannot send to non-channel */ <- 0
+}
+
+func _[T C1](ch T) {
+ ch <- 0
+}
+
+func _[T C2](ch T) {
+ ch /* ERROR cannot send to receive-only channel */ <- 0
+}
+
+func _[T C3](ch T) {
+ ch /* ERROR channels of ch .* must have the same element type */ <- 0
+}
+
+func _[T C4](ch T) {
+ ch <- 0
+}
+
+func _[T C5[X], X any](ch T, x X) {
+ ch <- x
+}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47127.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47127.go2
new file mode 100644
index 0000000000..108d600a38
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47127.go2
@@ -0,0 +1,37 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Embedding of stand-alone type parameters is not permitted.
+
+package p
+
+type (
+ _[P any] interface{ *P | []P | chan P | map[string]P }
+ _[P any] interface{ P /* ERROR "cannot embed a type parameter" */ }
+ _[P any] interface{ ~P /* ERROR "cannot embed a type parameter" */ }
+ _[P any] interface{ int | P /* ERROR "cannot embed a type parameter" */ }
+ _[P any] interface{ int | ~P /* ERROR "cannot embed a type parameter" */ }
+)
+
+func _[P any]() {
+ type (
+ _[P any] interface{ *P | []P | chan P | map[string]P }
+ _[P any] interface{ P /* ERROR "cannot embed a type parameter" */ }
+ _[P any] interface{ ~P /* ERROR "cannot embed a type parameter" */ }
+ _[P any] interface{ int | P /* ERROR "cannot embed a type parameter" */ }
+ _[P any] interface{ int | ~P /* ERROR "cannot embed a type parameter" */ }
+
+ _ interface{ *P | []P | chan P | map[string]P }
+ _ interface{ P /* ERROR "cannot embed a type parameter" */ }
+ _ interface{ ~P /* ERROR "cannot embed a type parameter" */ }
+ _ interface{ int | P /* ERROR "cannot embed a type parameter" */ }
+ _ interface{ int | ~P /* ERROR "cannot embed a type parameter" */ }
+ )
+}
+
+func _[P any, Q interface{ *P | []P | chan P | map[string]P }]() {}
+func _[P any, Q interface{ P /* ERROR "cannot embed a type parameter" */ }]() {}
+func _[P any, Q interface{ ~P /* ERROR "cannot embed a type parameter" */ }]() {}
+func _[P any, Q interface{ int | P /* ERROR "cannot embed a type parameter" */ }]() {}
+func _[P any, Q interface{ int | ~P /* ERROR "cannot embed a type parameter" */ }]() {}
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47411.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47411.go2
new file mode 100644
index 0000000000..77281a19a2
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47411.go2
@@ -0,0 +1,26 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package p
+
+func f[_ comparable]() {}
+func g[_ interface{interface{comparable; ~int|~string}}]() {}
+
+func _[P comparable,
+ Q interface{ comparable; ~int|~string },
+ R any, // not comparable
+ S interface{ comparable; ~func() }, // not comparable
+]() {
+ _ = f[int]
+ _ = f[P]
+ _ = f[Q]
+ _ = f[func( /* ERROR does not satisfy comparable */ )]
+ _ = f[R /* ERROR R has no constraints */ ]
+
+ _ = g[int]
+ _ = g[P /* ERROR P has no type constraints */ ]
+ _ = g[Q]
+ _ = g[func( /* ERROR does not satisfy comparable */ )]
+ _ = g[R /* ERROR R has no constraints */ ]
+}
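
issue47411.go2 exercises comparable both as a direct constraint and embedded in a constraint literal. For reference, a typical use of a comparable-constrained function (indexOf is an illustrative name, not from the test):

package main

import "fmt"

// == is available on values of type T precisely because T is constrained
// by comparable.
func indexOf[T comparable](s []T, want T) int {
	for i, v := range s {
		if v == want {
			return i
		}
	}
	return -1
}

func main() {
	fmt.Println(indexOf([]string{"a", "b", "c"}, "b")) // 1
}
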
diff --git a/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47747.go2 b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47747.go2
new file mode 100644
index 0000000000..af52056bef
--- /dev/null
+++ b/src/cmd/compile/internal/types2/testdata/fixedbugs/issue47747.go2
@@ -0,0 +1,68 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package p
+
+type T1[P any] P
+
+func (T1[_]) m() {}
+
+func _[P any](x *T1[P]) {
+ // x.m exists because x is of type *T1 where T1 is a defined type
+ // (even though under(T1) is a type parameter)
+ x.m()
+}
+
+
+func _[P interface{ m() }](x P) {
+ x.m()
+ // (&x).m doesn't exist because &x is of type *P
+ // and pointers to type parameters don't have methods
+ (&x).m /* ERROR \*P has no field or method m */ ()
+}
+
+
+type T2 interface{ m() }
+
+func _(x *T2) {
+ // x.m doesn't exist because x is of type *T2
+ // and pointers to interfaces don't have methods
+ x.m /* ERROR \*T2 has no field or method m */()
+}
+
+// Test case 1 from issue
+
+type Fooer1[t any] interface {
+ Foo(Barer[t])
+}
+type Barer[t any] interface {
+ Bar(t)
+}
+
+type Foo1[t any] t
+type Bar[t any] t
+
+func (l Foo1[t]) Foo(v Barer[t]) { v.Bar(t(l)) }
+func (b *Bar[t]) Bar(l t) { *b = Bar[t](l) }
+
+func _[t any](f Fooer1[t]) t {
+ var b Bar[t]
+ f.Foo(&b)
+ return t(b)
+}
+
+// Test case 2 from issue
+
+type Fooer2[t any] interface {
+ Foo()
+}
+
+type Foo2[t any] t
+
+func (f *Foo2[t]) Foo() {}
+
+func _[t any](v t) {
+ var f = Foo2[t](v)
+ _ = Fooer2[t](&f)
+}
diff --git a/src/cmd/compile/internal/types2/tuple.go b/src/cmd/compile/internal/types2/tuple.go
new file mode 100644
index 0000000000..a3946beab5
--- /dev/null
+++ b/src/cmd/compile/internal/types2/tuple.go
@@ -0,0 +1,36 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+// A Tuple represents an ordered list of variables; a nil *Tuple is a valid (empty) tuple.
+// Tuples are used as components of signatures and to represent the type of multiple
+// assignments; they are not first class types of Go.
+type Tuple struct {
+ vars []*Var
+}
+
+// NewTuple returns a new tuple for the given variables.
+func NewTuple(x ...*Var) *Tuple {
+ if len(x) > 0 {
+ return &Tuple{vars: x}
+ }
+ // TODO(gri) Don't represent empty tuples with a (*Tuple)(nil) pointer;
+ // it's too subtle and causes problems.
+ return nil
+}
+
+// Len returns the number of variables of tuple t.
+func (t *Tuple) Len() int {
+ if t != nil {
+ return len(t.vars)
+ }
+ return 0
+}
+
+// At returns the i'th variable of tuple t.
+func (t *Tuple) At(i int) *Var { return t.vars[i] }
+
+func (t *Tuple) Underlying() Type { return t }
+func (t *Tuple) String() string { return TypeString(t, nil) }
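
The new tuple.go appears to be the Tuple code moved out of type.go unchanged (the matching deletions appear below). types2 itself is internal to the compiler, but the exported go/types package carries the same Tuple API, so the behavior can be sketched against it; this sketch only assumes the standard go/types and go/token packages:

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	// Build a two-element tuple (x int, y string) and walk it with Len/At,
	// mirroring the Tuple methods defined in the factored-out file.
	x := types.NewVar(token.NoPos, nil, "x", types.Typ[types.Int])
	y := types.NewVar(token.NoPos, nil, "y", types.Typ[types.String])
	t := types.NewTuple(x, y)
	for i := 0; i < t.Len(); i++ {
		fmt.Println(t.At(i).Name(), t.At(i).Type())
	}
}
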
diff --git a/src/cmd/compile/internal/types2/type.go b/src/cmd/compile/internal/types2/type.go
index e6c260ff67..4b8642aa96 100644
--- a/src/cmd/compile/internal/types2/type.go
+++ b/src/cmd/compile/internal/types2/type.go
@@ -4,12 +4,6 @@
package types2
-import (
- "cmd/compile/internal/syntax"
- "fmt"
- "sync/atomic"
-)
-
// A Type represents a type of Go.
// All types implement the Type interface.
type Type interface {
@@ -22,895 +16,55 @@ type Type interface {
String() string
}
-// BasicKind describes the kind of basic type.
-type BasicKind int
-
-const (
- Invalid BasicKind = iota // type is invalid
-
- // predeclared types
- Bool
- Int
- Int8
- Int16
- Int32
- Int64
- Uint
- Uint8
- Uint16
- Uint32
- Uint64
- Uintptr
- Float32
- Float64
- Complex64
- Complex128
- String
- UnsafePointer
-
- // types for untyped values
- UntypedBool
- UntypedInt
- UntypedRune
- UntypedFloat
- UntypedComplex
- UntypedString
- UntypedNil
-
- // aliases
- Byte = Uint8
- Rune = Int32
-)
-
-// BasicInfo is a set of flags describing properties of a basic type.
-type BasicInfo int
-
-// Properties of basic types.
-const (
- IsBoolean BasicInfo = 1 << iota
- IsInteger
- IsUnsigned
- IsFloat
- IsComplex
- IsString
- IsUntyped
-
- IsOrdered = IsInteger | IsFloat | IsString
- IsNumeric = IsInteger | IsFloat | IsComplex
- IsConstType = IsBoolean | IsNumeric | IsString
-)
-
-// A Basic represents a basic type.
-type Basic struct {
- kind BasicKind
- info BasicInfo
- name string
-}
-
-// Kind returns the kind of basic type b.
-func (b *Basic) Kind() BasicKind { return b.kind }
-
-// Info returns information about properties of basic type b.
-func (b *Basic) Info() BasicInfo { return b.info }
-
-// Name returns the name of basic type b.
-func (b *Basic) Name() string { return b.name }
-
-// An Array represents an array type.
-type Array struct {
- len int64
- elem Type
-}
-
-// NewArray returns a new array type for the given element type and length.
-// A negative length indicates an unknown length.
-func NewArray(elem Type, len int64) *Array { return &Array{len: len, elem: elem} }
-
-// Len returns the length of array a.
-// A negative result indicates an unknown length.
-func (a *Array) Len() int64 { return a.len }
-
-// Elem returns element type of array a.
-func (a *Array) Elem() Type { return a.elem }
-
-// A Slice represents a slice type.
-type Slice struct {
- elem Type
-}
-
-// NewSlice returns a new slice type for the given element type.
-func NewSlice(elem Type) *Slice { return &Slice{elem: elem} }
-
-// Elem returns the element type of slice s.
-func (s *Slice) Elem() Type { return s.elem }
-
-// A Struct represents a struct type.
-type Struct struct {
- fields []*Var
- tags []string // field tags; nil if there are no tags
-}
-
-// NewStruct returns a new struct with the given fields and corresponding field tags.
-// If a field with index i has a tag, tags[i] must be that tag, but len(tags) may be
-// only as long as required to hold the tag with the largest index i. Consequently,
-// if no field has a tag, tags may be nil.
-func NewStruct(fields []*Var, tags []string) *Struct {
- var fset objset
- for _, f := range fields {
- if f.name != "_" && fset.insert(f) != nil {
- panic("multiple fields with the same name")
- }
- }
- if len(tags) > len(fields) {
- panic("more tags than fields")
- }
- return &Struct{fields: fields, tags: tags}
-}
-
-// NumFields returns the number of fields in the struct (including blank and embedded fields).
-func (s *Struct) NumFields() int { return len(s.fields) }
-
-// Field returns the i'th field for 0 <= i < NumFields().
-func (s *Struct) Field(i int) *Var { return s.fields[i] }
-
-// Tag returns the i'th field tag for 0 <= i < NumFields().
-func (s *Struct) Tag(i int) string {
- if i < len(s.tags) {
- return s.tags[i]
- }
- return ""
-}
-
-// A Pointer represents a pointer type.
-type Pointer struct {
- base Type // element type
-}
-
-// NewPointer returns a new pointer type for the given element (base) type.
-func NewPointer(elem Type) *Pointer { return &Pointer{base: elem} }
-
-// Elem returns the element type for the given pointer p.
-func (p *Pointer) Elem() Type { return p.base }
-
-// A Tuple represents an ordered list of variables; a nil *Tuple is a valid (empty) tuple.
-// Tuples are used as components of signatures and to represent the type of multiple
-// assignments; they are not first class types of Go.
-type Tuple struct {
- vars []*Var
-}
-
-// NewTuple returns a new tuple for the given variables.
-func NewTuple(x ...*Var) *Tuple {
- if len(x) > 0 {
- return &Tuple{vars: x}
- }
- // TODO(gri) Don't represent empty tuples with a (*Tuple)(nil) pointer;
- // it's too subtle and causes problems.
- return nil
-}
-
-// Len returns the number variables of tuple t.
-func (t *Tuple) Len() int {
- if t != nil {
- return len(t.vars)
- }
- return 0
-}
-
-// At returns the i'th variable of tuple t.
-func (t *Tuple) At(i int) *Var { return t.vars[i] }
-
-// A Signature represents a (non-builtin) function or method type.
-// The receiver is ignored when comparing signatures for identity.
-type Signature struct {
- // We need to keep the scope in Signature (rather than passing it around
- // and store it in the Func Object) because when type-checking a function
- // literal we call the general type checker which returns a general Type.
- // We then unpack the *Signature and use the scope for the literal body.
- rparams []*TypeName // receiver type parameters from left to right; or nil
- tparams []*TypeName // type parameters from left to right; or nil
- scope *Scope // function scope, present for package-local signatures
- recv *Var // nil if not a method
- params *Tuple // (incoming) parameters from left to right; or nil
- results *Tuple // (outgoing) results from left to right; or nil
- variadic bool // true if the last parameter's type is of the form ...T (or string, for append built-in only)
-}
-
-// NewSignature returns a new function type for the given receiver, parameters,
-// and results, either of which may be nil. If variadic is set, the function
-// is variadic, it must have at least one parameter, and the last parameter
-// must be of unnamed slice type.
-func NewSignature(recv *Var, params, results *Tuple, variadic bool) *Signature {
- if variadic {
- n := params.Len()
- if n == 0 {
- panic("types2.NewSignature: variadic function must have at least one parameter")
- }
- if _, ok := params.At(n - 1).typ.(*Slice); !ok {
- panic("types2.NewSignature: variadic parameter must be of unnamed slice type")
- }
- }
- return &Signature{recv: recv, params: params, results: results, variadic: variadic}
-}
-
-// Recv returns the receiver of signature s (if a method), or nil if a
-// function. It is ignored when comparing signatures for identity.
-//
-// For an abstract method, Recv returns the enclosing interface either
-// as a *Named or an *Interface. Due to embedding, an interface may
-// contain methods whose receiver type is a different interface.
-func (s *Signature) Recv() *Var { return s.recv }
-
-// TParams returns the type parameters of signature s, or nil.
-func (s *Signature) TParams() []*TypeName { return s.tparams }
-
-// RParams returns the receiver type params of signature s, or nil.
-func (s *Signature) RParams() []*TypeName { return s.rparams }
-
-// SetTParams sets the type parameters of signature s.
-func (s *Signature) SetTParams(tparams []*TypeName) { s.tparams = tparams }
-
-// Params returns the parameters of signature s, or nil.
-func (s *Signature) Params() *Tuple { return s.params }
-
-// Results returns the results of signature s, or nil.
-func (s *Signature) Results() *Tuple { return s.results }
-
-// Variadic reports whether the signature s is variadic.
-func (s *Signature) Variadic() bool { return s.variadic }
-
-// A Sum represents a set of possible types.
-// Sums are currently used to represent type lists of interfaces
-// and thus the underlying types of type parameters; they are not
-// first class types of Go.
-type Sum struct {
- types []Type // types are unique
-}
-
-// NewSum returns a new Sum type consisting of the provided
-// types if there are more than one. If there is exactly one
-// type, it returns that type. If the list of types is empty
-// the result is nil.
-func NewSum(types []Type) Type {
- if len(types) == 0 {
- return nil
- }
-
- // What should happen if types contains a sum type?
- // Do we flatten the types list? For now we check
- // and panic. This should not be possible for the
- // current use case of type lists.
- // TODO(gri) Come up with the rules for sum types.
- for _, t := range types {
- if _, ok := t.(*Sum); ok {
- panic("sum type contains sum type - unimplemented")
- }
- }
-
- if len(types) == 1 {
- return types[0]
- }
- return &Sum{types: types}
-}
-
-// is reports whether all types in t satisfy pred.
-func (s *Sum) is(pred func(Type) bool) bool {
- if s == nil {
- return false
- }
- for _, t := range s.types {
- if !pred(t) {
- return false
- }
- }
- return true
-}
-
-// An Interface represents an interface type.
-type Interface struct {
- methods []*Func // ordered list of explicitly declared methods
- types Type // (possibly a Sum) type declared with a type list (TODO(gri) need better field name)
- embeddeds []Type // ordered list of explicitly embedded types
-
- allMethods []*Func // ordered list of methods declared with or embedded in this interface (TODO(gri): replace with mset)
- allTypes Type // intersection of all embedded and locally declared types (TODO(gri) need better field name)
-
- obj Object // type declaration defining this interface; or nil (for better error messages)
-}
-
-// unpack unpacks a type into a list of types.
-// TODO(gri) Try to eliminate the need for this function.
-func unpack(typ Type) []Type {
- if typ == nil {
- return nil
- }
- if sum := asSum(typ); sum != nil {
- return sum.types
- }
- return []Type{typ}
-}
-
-// is reports whether interface t represents types that all satisfy pred.
-func (t *Interface) is(pred func(Type) bool) bool {
- if t.allTypes == nil {
- return false // we must have at least one type! (was bug)
- }
- for _, t := range unpack(t.allTypes) {
- if !pred(t) {
- return false
- }
- }
- return true
-}
-
-// emptyInterface represents the empty (completed) interface
-var emptyInterface = Interface{allMethods: markComplete}
-
-// markComplete is used to mark an empty interface as completely
-// set up by setting the allMethods field to a non-nil empty slice.
-var markComplete = make([]*Func, 0)
-
-// NewInterface returns a new (incomplete) interface for the given methods and embedded types.
-// Each embedded type must have an underlying type of interface type.
-// NewInterface takes ownership of the provided methods and may modify their types by setting
-// missing receivers. To compute the method set of the interface, Complete must be called.
-//
-// Deprecated: Use NewInterfaceType instead which allows any (even non-defined) interface types
-// to be embedded. This is necessary for interfaces that embed alias type names referring to
-// non-defined (literal) interface types.
-func NewInterface(methods []*Func, embeddeds []*Named) *Interface {
- tnames := make([]Type, len(embeddeds))
- for i, t := range embeddeds {
- tnames[i] = t
- }
- return NewInterfaceType(methods, tnames)
-}
-
-// NewInterfaceType returns a new (incomplete) interface for the given methods and embedded types.
-// Each embedded type must have an underlying type of interface type (this property is not
-// verified for defined types, which may be in the process of being set up and which don't
-// have a valid underlying type yet).
-// NewInterfaceType takes ownership of the provided methods and may modify their types by setting
-// missing receivers. To compute the method set of the interface, Complete must be called.
-func NewInterfaceType(methods []*Func, embeddeds []Type) *Interface {
- if len(methods) == 0 && len(embeddeds) == 0 {
- return &emptyInterface
- }
-
- // set method receivers if necessary
- typ := new(Interface)
- for _, m := range methods {
- if sig := m.typ.(*Signature); sig.recv == nil {
- sig.recv = NewVar(m.pos, m.pkg, "", typ)
- }
- }
-
- // All embedded types should be interfaces; however, defined types
- // may not yet be fully resolved. Only verify that non-defined types
- // are interfaces. This matches the behavior of the code before the
- // fix for #25301 (issue #25596).
- for _, t := range embeddeds {
- if _, ok := t.(*Named); !ok && !IsInterface(t) {
- panic("embedded type is not an interface")
- }
- }
-
- // sort for API stability
- sortMethods(methods)
- sortTypes(embeddeds)
-
- typ.methods = methods
- typ.embeddeds = embeddeds
- return typ
-}
-
-// NumExplicitMethods returns the number of explicitly declared methods of interface t.
-func (t *Interface) NumExplicitMethods() int { return len(t.methods) }
-
-// ExplicitMethod returns the i'th explicitly declared method of interface t for 0 <= i < t.NumExplicitMethods().
-// The methods are ordered by their unique Id.
-func (t *Interface) ExplicitMethod(i int) *Func { return t.methods[i] }
-
-// NumEmbeddeds returns the number of embedded types in interface t.
-func (t *Interface) NumEmbeddeds() int { return len(t.embeddeds) }
-
-// Embedded returns the i'th embedded defined (*Named) type of interface t for 0 <= i < t.NumEmbeddeds().
-// The result is nil if the i'th embedded type is not a defined type.
-//
-// Deprecated: Use EmbeddedType which is not restricted to defined (*Named) types.
-func (t *Interface) Embedded(i int) *Named { tname, _ := t.embeddeds[i].(*Named); return tname }
-
-// EmbeddedType returns the i'th embedded type of interface t for 0 <= i < t.NumEmbeddeds().
-func (t *Interface) EmbeddedType(i int) Type { return t.embeddeds[i] }
-
-// NumMethods returns the total number of methods of interface t.
-// The interface must have been completed.
-func (t *Interface) NumMethods() int { t.assertCompleteness(); return len(t.allMethods) }
-
-func (t *Interface) assertCompleteness() {
- if t.allMethods == nil {
- panic("interface is incomplete")
- }
-}
-
-// Method returns the i'th method of interface t for 0 <= i < t.NumMethods().
-// The methods are ordered by their unique Id.
-// The interface must have been completed.
-func (t *Interface) Method(i int) *Func { t.assertCompleteness(); return t.allMethods[i] }
-
-// Empty reports whether t is the empty interface.
-func (t *Interface) Empty() bool {
- if t.allMethods != nil {
- // interface is complete - quick test
- // A non-nil allTypes may still be empty and represents the bottom type.
- return len(t.allMethods) == 0 && t.allTypes == nil
- }
- return !t.iterate(func(t *Interface) bool {
- return len(t.methods) > 0 || t.types != nil
- }, nil)
-}
-
-// HasTypeList reports whether interface t has a type list, possibly from an embedded type.
-func (t *Interface) HasTypeList() bool {
- if t.allMethods != nil {
- // interface is complete - quick test
- return t.allTypes != nil
- }
-
- return t.iterate(func(t *Interface) bool {
- return t.types != nil
- }, nil)
-}
-
-// IsComparable reports whether interface t is or embeds the predeclared interface "comparable".
-func (t *Interface) IsComparable() bool {
- if t.allMethods != nil {
- // interface is complete - quick test
- _, m := lookupMethod(t.allMethods, nil, "==")
- return m != nil
- }
-
- return t.iterate(func(t *Interface) bool {
- _, m := lookupMethod(t.methods, nil, "==")
- return m != nil
- }, nil)
-}
-
-// IsConstraint reports t.HasTypeList() || t.IsComparable().
-func (t *Interface) IsConstraint() bool {
- if t.allMethods != nil {
- // interface is complete - quick test
- if t.allTypes != nil {
- return true
- }
- _, m := lookupMethod(t.allMethods, nil, "==")
- return m != nil
- }
-
- return t.iterate(func(t *Interface) bool {
- if t.types != nil {
- return true
- }
- _, m := lookupMethod(t.methods, nil, "==")
- return m != nil
- }, nil)
-}
-
-// iterate calls f with t and then with any embedded interface of t, recursively, until f returns true.
-// iterate reports whether any call to f returned true.
-func (t *Interface) iterate(f func(*Interface) bool, seen map[*Interface]bool) bool {
- if f(t) {
- return true
- }
- for _, e := range t.embeddeds {
- // e should be an interface but be careful (it may be invalid)
- if e := asInterface(e); e != nil {
- // Cyclic interfaces such as "type E interface { E }" are not permitted
- // but they are still constructed and we need to detect such cycles.
- if seen[e] {
- continue
- }
- if seen == nil {
- seen = make(map[*Interface]bool)
- }
- seen[e] = true
- if e.iterate(f, seen) {
- return true
- }
- }
- }
- return false
-}
-
-// isSatisfiedBy reports whether interface t's type list is satisfied by the type typ.
-// If the type list is empty (absent), typ trivially satisfies the interface.
-// TODO(gri) This is not a great name. Eventually, we should have a more comprehensive
-// "implements" predicate.
-func (t *Interface) isSatisfiedBy(typ Type) bool {
- t.Complete()
- if t.allTypes == nil {
- return true
- }
- types := unpack(t.allTypes)
- return includes(types, typ) || includes(types, under(typ))
-}
-
-// Complete computes the interface's method set. It must be called by users of
-// NewInterfaceType and NewInterface after the interface's embedded types are
-// fully defined and before using the interface type in any way other than to
-// form other types. The interface must not contain duplicate methods or a
-// panic occurs. Complete returns the receiver.
-func (t *Interface) Complete() *Interface {
- // TODO(gri) consolidate this method with Checker.completeInterface
- if t.allMethods != nil {
- return t
- }
-
- t.allMethods = markComplete // avoid infinite recursion
-
- var todo []*Func
- var methods []*Func
- var seen objset
- addMethod := func(m *Func, explicit bool) {
- switch other := seen.insert(m); {
- case other == nil:
- methods = append(methods, m)
- case explicit:
- panic("duplicate method " + m.name)
- default:
- // check method signatures after all locally embedded interfaces are computed
- todo = append(todo, m, other.(*Func))
- }
- }
-
- for _, m := range t.methods {
- addMethod(m, true)
- }
-
- allTypes := t.types
+// top represents the top of the type lattice.
+// It is the underlying type of a type parameter that
+// can be satisfied by any type (ignoring methods),
+// because its type constraint contains no restrictions
+// besides methods.
+type top struct{}
- for _, typ := range t.embeddeds {
- utyp := under(typ)
- etyp := asInterface(utyp)
- if etyp == nil {
- if utyp != Typ[Invalid] {
- panic(fmt.Sprintf("%s is not an interface", typ))
- }
- continue
- }
- etyp.Complete()
- for _, m := range etyp.allMethods {
- addMethod(m, false)
- }
- allTypes = intersect(allTypes, etyp.allTypes)
- }
+// theTop is the singleton top type.
+var theTop = &top{}
- for i := 0; i < len(todo); i += 2 {
- m := todo[i]
- other := todo[i+1]
- if !Identical(m.typ, other.typ) {
- panic("duplicate method " + m.name)
- }
- }
+func (t *top) Underlying() Type { return t }
+func (t *top) String() string { return TypeString(t, nil) }
- if methods != nil {
- sortMethods(methods)
- t.allMethods = methods
+// under returns the true expanded underlying type.
+// If it doesn't exist, the result is Typ[Invalid].
+// under must only be called when a type is known
+// to be fully set up.
+func under(t Type) Type {
+ // TODO(gri) is this correct for *Union?
+ if n := asNamed(t); n != nil {
+ return n.under()
}
- t.allTypes = allTypes
-
return t
}
-// A Map represents a map type.
-type Map struct {
- key, elem Type
-}
-
-// NewMap returns a new map for the given key and element types.
-func NewMap(key, elem Type) *Map {
- return &Map{key: key, elem: elem}
-}
-
-// Key returns the key type of map m.
-func (m *Map) Key() Type { return m.key }
-
-// Elem returns the element type of map m.
-func (m *Map) Elem() Type { return m.elem }
-
-// A Chan represents a channel type.
-type Chan struct {
- dir ChanDir
- elem Type
-}
-
-// A ChanDir value indicates a channel direction.
-type ChanDir int
-
-// The direction of a channel is indicated by one of these constants.
-const (
- SendRecv ChanDir = iota
- SendOnly
- RecvOnly
-)
-
-// NewChan returns a new channel type for the given direction and element type.
-func NewChan(dir ChanDir, elem Type) *Chan {
- return &Chan{dir: dir, elem: elem}
-}
-
-// Dir returns the direction of channel c.
-func (c *Chan) Dir() ChanDir { return c.dir }
-
-// Elem returns the element type of channel c.
-func (c *Chan) Elem() Type { return c.elem }
-
-// TODO(gri) Clean up Named struct below; specifically the fromRHS field (can we use underlying?).
-
-// A Named represents a named (defined) type.
-type Named struct {
- check *Checker // for Named.under implementation
- info typeInfo // for cycle detection
- obj *TypeName // corresponding declared object
- orig *Named // original, uninstantiated type
- fromRHS Type // type (on RHS of declaration) this *Named type is derived from (for cycle reporting)
- underlying Type // possibly a *Named during setup; never a *Named once set up completely
- tparams []*TypeName // type parameters, or nil
- targs []Type // type arguments (after instantiation), or nil
- methods []*Func // methods declared for this type (not the method set of this type); signatures are type-checked lazily
-}
-
-// NewNamed returns a new named type for the given type name, underlying type, and associated methods.
-// If the given type name obj doesn't have a type yet, its type is set to the returned named type.
-// The underlying type must not be a *Named.
-func NewNamed(obj *TypeName, underlying Type, methods []*Func) *Named {
- if _, ok := underlying.(*Named); ok {
- panic("types2.NewNamed: underlying type must not be *Named")
- }
- return (*Checker)(nil).newNamed(obj, nil, underlying, nil, methods)
-}
-
-// newNamed is like NewNamed but with a *Checker receiver and additional orig argument.
-func (check *Checker) newNamed(obj *TypeName, orig *Named, underlying Type, tparams []*TypeName, methods []*Func) *Named {
- typ := &Named{check: check, obj: obj, orig: orig, fromRHS: underlying, underlying: underlying, tparams: tparams, methods: methods}
- if typ.orig == nil {
- typ.orig = typ
- }
- if obj.typ == nil {
- obj.typ = typ
- }
- return typ
-}
-
-// Obj returns the type name for the named type t.
-func (t *Named) Obj() *TypeName { return t.obj }
-
-// Orig returns the original generic type an instantiated type is derived from.
-// If t is not an instantiated type, the result is t.
-func (t *Named) Orig() *Named { return t.orig }
-
-// TODO(gri) Come up with a better representation and API to distinguish
-// between parameterized instantiated and non-instantiated types.
-
-// TParams returns the type parameters of the named type t, or nil.
-// The result is non-nil for an (originally) parameterized type even if it is instantiated.
-func (t *Named) TParams() []*TypeName { return t.tparams }
-
-// SetTParams sets the type parameters of the named type t.
-func (t *Named) SetTParams(tparams []*TypeName) { t.tparams = tparams }
-
-// TArgs returns the type arguments after instantiation of the named type t, or nil if not instantiated.
-func (t *Named) TArgs() []Type { return t.targs }
-
-// SetTArgs sets the type arguments of the named type t.
-func (t *Named) SetTArgs(args []Type) { t.targs = args }
-
-// NumMethods returns the number of explicit methods whose receiver is named type t.
-func (t *Named) NumMethods() int { return len(t.methods) }
-
-// Method returns the i'th method of named type t for 0 <= i < t.NumMethods().
-func (t *Named) Method(i int) *Func { return t.methods[i] }
-
-// SetUnderlying sets the underlying type and marks t as complete.
-func (t *Named) SetUnderlying(underlying Type) {
- if underlying == nil {
- panic("types2.Named.SetUnderlying: underlying type must not be nil")
- }
- if _, ok := underlying.(*Named); ok {
- panic("types2.Named.SetUnderlying: underlying type must not be *Named")
- }
- t.underlying = underlying
-}
-
-// AddMethod adds method m unless it is already in the method list.
-func (t *Named) AddMethod(m *Func) {
- if i, _ := lookupMethod(t.methods, m.pkg, m.name); i < 0 {
- t.methods = append(t.methods, m)
- }
-}
-
-// Note: This is a uint32 rather than a uint64 because the
-// respective 64 bit atomic instructions are not available
-// on all platforms.
-var lastId uint32
-
-// nextId returns a value increasing monotonically by 1 with
-// each call, starting with 1. It may be called concurrently.
-func nextId() uint64 { return uint64(atomic.AddUint32(&lastId, 1)) }
-
-// A TypeParam represents a type parameter type.
-type TypeParam struct {
- check *Checker // for lazy type bound completion
- id uint64 // unique id, for debugging only
- obj *TypeName // corresponding type name
- index int // type parameter index in source order, starting at 0
- bound Type // *Named or *Interface; underlying type is always *Interface
-}
-
-// Obj returns the type name for the type parameter t.
-func (t *TypeParam) Obj() *TypeName { return t.obj }
-
-// NewTypeParam returns a new TypeParam.
-func (check *Checker) NewTypeParam(obj *TypeName, index int, bound Type) *TypeParam {
- assert(bound != nil)
- typ := &TypeParam{check: check, id: nextId(), obj: obj, index: index, bound: bound}
- if obj.typ == nil {
- obj.typ = typ
- }
- return typ
-}
-
-func (t *TypeParam) Bound() *Interface {
- iface := asInterface(t.bound)
- // use the type bound position if we have one
- pos := nopos
- if n, _ := t.bound.(*Named); n != nil {
- pos = n.obj.pos
- }
- // TODO(gri) switch this to an unexported method on Checker.
- t.check.completeInterface(pos, iface)
- return iface
-}
-
// optype returns a type's operational type. Except for
// type parameters, the operational type is the same
// as the underlying type (as returned by under). For
-// Type parameters, the operational type is determined
-// by the corresponding type bound's type list. The
-// result may be the bottom or top type, but it is never
-// the incoming type parameter.
+// type parameters, the operational type is the structural
+// type, if any; otherwise it's the top type.
+// The result is never the incoming type parameter.
func optype(typ Type) Type {
if t := asTypeParam(typ); t != nil {
+ // TODO(gri) review accuracy of this comment
// If the optype is typ, return the top type as we have
// no information. It also prevents infinite recursion
// via the asTypeParam converter function. This can happen
// for a type parameter list of the form:
// (type T interface { type T }).
// See also issue #39680.
- if u := t.Bound().allTypes; u != nil && u != typ {
- // u != typ and u is a type parameter => under(u) != typ, so this is ok
- return under(u)
+ if u := t.structuralType(); u != nil {
+ assert(u != typ) // "naked" type parameters cannot be embedded
+ return u
}
return theTop
}
return under(typ)
}
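For orientation, a hedged source-level sketch of what a constraint's structural type enables; the package, type, and function names below are illustrative and not part of this change, and Go 1.18-style constraint syntax is assumed:

	package example

	// Slice is a constraint whose type set contains exactly the types
	// with underlying type []E, so its structural type is []E.
	type Slice[E any] interface{ ~[]E }

	// First may index s because the structural type of S's constraint
	// is the slice type []E; with no single structural type
	// (e.g. ~int | ~[]E), indexing would not be permitted.
	func First[S Slice[E], E any](s S) E {
		return s[0]
	}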
-// An instance represents an instantiated generic type syntactically
-// (without expanding the instantiation). Type instances appear only
-// during type-checking and are replaced by their fully instantiated
-// (expanded) types before the end of type-checking.
-type instance struct {
- check *Checker // for lazy instantiation
- pos syntax.Pos // position of type instantiation; for error reporting only
- base *Named // parameterized type to be instantiated
- targs []Type // type arguments
- poslist []syntax.Pos // position of each targ; for error reporting only
- value Type // base(targs...) after instantiation or Typ[Invalid]; nil if not yet set
-}
-
-// expand returns the instantiated (= expanded) type of t.
-// The result is either an instantiated *Named type, or
-// Typ[Invalid] if there was an error.
-func (t *instance) expand() Type {
- v := t.value
- if v == nil {
- v = t.check.instantiate(t.pos, t.base, t.targs, t.poslist)
- if v == nil {
- v = Typ[Invalid]
- }
- t.value = v
- }
- // After instantiation we must have an invalid or a *Named type.
- if debug && v != Typ[Invalid] {
- _ = v.(*Named)
- }
- return v
-}
-
-// expand expands a type instance into its instantiated
-// type and leaves all other types alone. expand does
-// not recurse.
-func expand(typ Type) Type {
- if t, _ := typ.(*instance); t != nil {
- return t.expand()
- }
- return typ
-}
-
-// expandf is set to expand.
-// Call expandf when calling expand causes compile-time cycle error.
-var expandf func(Type) Type
-
-func init() { expandf = expand }
-
-// bottom represents the bottom of the type lattice.
-// It is the underlying type of a type parameter that
-// cannot be satisfied by any type, usually because
-// the intersection of type constraints left nothing.
-type bottom struct{}
-
-// theBottom is the singleton bottom type.
-var theBottom = &bottom{}
-
-// top represents the top of the type lattice.
-// It is the underlying type of a type parameter that
-// can be satisfied by any type (ignoring methods),
-// usually because the type constraint has no type
-// list.
-type top struct{}
-
-// theTop is the singleton top type.
-var theTop = &top{}
-
-// Type-specific implementations of Underlying.
-func (t *Basic) Underlying() Type { return t }
-func (t *Array) Underlying() Type { return t }
-func (t *Slice) Underlying() Type { return t }
-func (t *Struct) Underlying() Type { return t }
-func (t *Pointer) Underlying() Type { return t }
-func (t *Tuple) Underlying() Type { return t }
-func (t *Signature) Underlying() Type { return t }
-func (t *Sum) Underlying() Type { return t }
-func (t *Interface) Underlying() Type { return t }
-func (t *Map) Underlying() Type { return t }
-func (t *Chan) Underlying() Type { return t }
-func (t *Named) Underlying() Type { return t.underlying }
-func (t *TypeParam) Underlying() Type { return t }
-func (t *instance) Underlying() Type { return t }
-func (t *bottom) Underlying() Type { return t }
-func (t *top) Underlying() Type { return t }
-
-// Type-specific implementations of String.
-func (t *Basic) String() string { return TypeString(t, nil) }
-func (t *Array) String() string { return TypeString(t, nil) }
-func (t *Slice) String() string { return TypeString(t, nil) }
-func (t *Struct) String() string { return TypeString(t, nil) }
-func (t *Pointer) String() string { return TypeString(t, nil) }
-func (t *Tuple) String() string { return TypeString(t, nil) }
-func (t *Signature) String() string { return TypeString(t, nil) }
-func (t *Sum) String() string { return TypeString(t, nil) }
-func (t *Interface) String() string { return TypeString(t, nil) }
-func (t *Map) String() string { return TypeString(t, nil) }
-func (t *Chan) String() string { return TypeString(t, nil) }
-func (t *Named) String() string { return TypeString(t, nil) }
-func (t *TypeParam) String() string { return TypeString(t, nil) }
-func (t *instance) String() string { return TypeString(t, nil) }
-func (t *bottom) String() string { return TypeString(t, nil) }
-func (t *top) String() string { return TypeString(t, nil) }
-
-// under returns the true expanded underlying type.
-// If it doesn't exist, the result is Typ[Invalid].
-// under must only be called when a type is known
-// to be fully set up.
-func under(t Type) Type {
- // TODO(gri) is this correct for *Sum?
- if n := asNamed(t); n != nil {
- return n.under()
- }
- return t
-}
-
// Converters
//
// A converter must only be called when a type is
@@ -944,39 +98,26 @@ func asPointer(t Type) *Pointer {
return op
}
-// asTuple is not needed - not provided
-
func asSignature(t Type) *Signature {
op, _ := optype(t).(*Signature)
return op
}
-func asSum(t Type) *Sum {
- op, _ := optype(t).(*Sum)
- return op
-}
+// If the argument to asInterface, asNamed, or asTypeParam is of the respective type
+// (possibly after expanding an instance type), these methods return that type.
+// Otherwise the result is nil.
+// asInterface does not need to look at optype (type sets don't contain interfaces)
func asInterface(t Type) *Interface {
- op, _ := optype(t).(*Interface)
- return op
-}
-
-func asMap(t Type) *Map {
- op, _ := optype(t).(*Map)
- return op
-}
-
-func asChan(t Type) *Chan {
- op, _ := optype(t).(*Chan)
- return op
+ u, _ := under(t).(*Interface)
+ return u
}
-// If the argument to asNamed and asTypeParam is of the respective types
-// (possibly after expanding an instance type), these methods return that type.
-// Otherwise the result is nil.
-
func asNamed(t Type) *Named {
- e, _ := expand(t).(*Named)
+ e, _ := t.(*Named)
+ if e != nil {
+ e.expand(nil)
+ }
return e
}
@@ -991,3 +132,4 @@ func AsPointer(t Type) *Pointer { return asPointer(t) }
func AsNamed(t Type) *Named { return asNamed(t) }
func AsSignature(t Type) *Signature { return asSignature(t) }
func AsInterface(t Type) *Interface { return asInterface(t) }
+func AsTypeParam(t Type) *TypeParam { return asTypeParam(t) }
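A hedged sketch of the intended call pattern for these exported converters (variable names are illustrative): unlike a bare type assertion, AsNamed also forces expansion of a lazily instantiated type before returning it.

	// t is a types2.Type obtained from a type-checked package.
	if named := types2.AsNamed(t); named != nil {
		_ = named.Obj() // safe to inspect: any instance has been expanded
	}
	if tpar := types2.AsTypeParam(t); tpar != nil {
		_ = tpar.Constraint() // look at the type parameter's constraint
	}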
diff --git a/src/cmd/compile/internal/types2/typelists.go b/src/cmd/compile/internal/types2/typelists.go
new file mode 100644
index 0000000000..3258a5e9f8
--- /dev/null
+++ b/src/cmd/compile/internal/types2/typelists.go
@@ -0,0 +1,69 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+// TParamList holds a list of type parameters.
+type TParamList struct{ tparams []*TypeParam }
+
+// Len returns the number of type parameters in the list.
+// It is safe to call on a nil receiver.
+func (l *TParamList) Len() int { return len(l.list()) }
+
+// At returns the i'th type parameter in the list.
+func (l *TParamList) At(i int) *TypeParam { return l.tparams[i] }
+
+// list is for internal use where we expect a []*TypeParam.
+// TODO(rfindley): list should probably be eliminated: we can pass around a
+// TParamList instead.
+func (l *TParamList) list() []*TypeParam {
+ if l == nil {
+ return nil
+ }
+ return l.tparams
+}
+
+// TypeList holds a list of types.
+type TypeList struct{ types []Type }
+
+// NewTypeList returns a new TypeList with the types in list.
+func NewTypeList(list []Type) *TypeList {
+ if len(list) == 0 {
+ return nil
+ }
+ return &TypeList{list}
+}
+
+// Len returns the number of types in the list.
+// It is safe to call on a nil receiver.
+func (l *TypeList) Len() int { return len(l.list()) }
+
+// At returns the i'th type in the list.
+func (l *TypeList) At(i int) Type { return l.types[i] }
+
+// list is for internal use where we expect a []Type.
+// TODO(rfindley): list should probably be eliminated: we can pass around a
+// TypeList instead.
+func (l *TypeList) list() []Type {
+ if l == nil {
+ return nil
+ }
+ return l.types
+}
+
+// ----------------------------------------------------------------------------
+// Implementation
+
+func bindTParams(list []*TypeParam) *TParamList {
+ if len(list) == 0 {
+ return nil
+ }
+ for i, typ := range list {
+ if typ.index >= 0 {
+ panic("type parameter bound more than once")
+ }
+ typ.index = i
+ }
+ return &TParamList{tparams: list}
+}
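A hedged, within-package sketch of how these list types are meant to be consumed: callers iterate via Len and At, and a nil *TypeList (or *TParamList) simply behaves as an empty list. The helper name is illustrative and assumes it sits next to code that already imports bytes (as typestring.go does).

	// printTypeArgs appends the types in targs to buf, comma-separated.
	// A nil targs is fine because Len is safe on a nil receiver.
	func printTypeArgs(buf *bytes.Buffer, targs *TypeList) {
		for i := 0; i < targs.Len(); i++ {
			if i > 0 {
				buf.WriteString(", ")
			}
			buf.WriteString(targs.At(i).String())
		}
	}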
diff --git a/src/cmd/compile/internal/types2/typeparam.go b/src/cmd/compile/internal/types2/typeparam.go
new file mode 100644
index 0000000000..445337fee8
--- /dev/null
+++ b/src/cmd/compile/internal/types2/typeparam.go
@@ -0,0 +1,108 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+import "sync/atomic"
+
+// Note: This is a uint32 rather than a uint64 because the
+// respective 64 bit atomic instructions are not available
+// on all platforms.
+var lastID uint32
+
+// nextID returns a value increasing monotonically by 1 with
+// each call, starting with 1. It may be called concurrently.
+func nextID() uint64 { return uint64(atomic.AddUint32(&lastID, 1)) }
+
+// A TypeParam represents a type parameter type.
+type TypeParam struct {
+ check *Checker // for lazy type bound completion
+ id uint64 // unique id, for debugging only
+ obj *TypeName // corresponding type name
+ index int // type parameter index in source order, starting at 0
+ // TODO(rfindley): this could also be Typ[Invalid]. Verify that this is handled correctly.
+ bound Type // *Named or *Interface; underlying type is always *Interface
+}
+
+// Obj returns the type name for the type parameter t.
+func (t *TypeParam) Obj() *TypeName { return t.obj }
+
+// NewTypeParam returns a new TypeParam. Type parameters may be set on a Named
+// or Signature type by calling SetTParams. Setting a type parameter on more
+// than one type will result in a panic.
+//
+// The bound argument can be nil, and set later via SetConstraint.
+func (check *Checker) NewTypeParam(obj *TypeName, bound Type) *TypeParam {
+ // Always increment lastID, even if it is not used.
+ id := nextID()
+ if check != nil {
+ check.nextID++
+ id = check.nextID
+ }
+ typ := &TypeParam{check: check, id: id, obj: obj, index: -1, bound: bound}
+ if obj.typ == nil {
+ obj.typ = typ
+ }
+ return typ
+}
+
+// Index returns the index of the type param within its param list.
+func (t *TypeParam) Index() int {
+ return t.index
+}
+
+// SetId sets the unique id of a type param. Should only be used for type params
+// in imported generic types.
+func (t *TypeParam) SetId(id uint64) {
+ t.id = id
+}
+
+// Constraint returns the type constraint specified for t.
+func (t *TypeParam) Constraint() Type {
+ // compute the type set if possible (we may not have an interface)
+ if iface, _ := under(t.bound).(*Interface); iface != nil {
+ // use the type bound position if we have one
+ pos := nopos
+ if n, _ := t.bound.(*Named); n != nil {
+ pos = n.obj.pos
+ }
+ computeInterfaceTypeSet(t.check, pos, iface)
+ }
+ return t.bound
+}
+
+// SetConstraint sets the type constraint for t.
+func (t *TypeParam) SetConstraint(bound Type) {
+ if bound == nil {
+ panic("nil constraint")
+ }
+ t.bound = bound
+}
+
+func (t *TypeParam) Underlying() Type { return t }
+func (t *TypeParam) String() string { return TypeString(t, nil) }
+
+// ----------------------------------------------------------------------------
+// Implementation
+
+// iface returns the constraint interface of t.
+func (t *TypeParam) iface() *Interface {
+ if iface, _ := under(t.Constraint()).(*Interface); iface != nil {
+ return iface
+ }
+ return &emptyInterface
+}
+
+// structuralType returns the structural type of the type parameter's constraint; or nil.
+func (t *TypeParam) structuralType() Type {
+ return t.iface().typeSet().structuralType()
+}
+
+func (t *TypeParam) is(f func(*term) bool) bool {
+ return t.iface().typeSet().is(f)
+}
+
+func (t *TypeParam) underIs(f func(Type) bool) bool {
+ return t.iface().typeSet().underIs(f)
+}
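A hedged sketch of how a type parameter is typically created and inspected inside the type checker; check is assumed to be a non-nil *Checker, pos a syntax.Pos, and the names are illustrative only.

	tname := NewTypeName(pos, check.pkg, "T", nil)
	tpar := check.NewTypeParam(tname, nil) // constraint can be attached later
	tpar.SetConstraint(&emptyInterface)    // here: no restrictions, i.e. any type

	_ = tpar.Obj().Name() // "T"
	_ = tpar.Index()      // -1 until the parameter is placed in a list by bindTParams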
diff --git a/src/cmd/compile/internal/types2/types_test.go b/src/cmd/compile/internal/types2/types_test.go
index 096402148d..1525844f2d 100644
--- a/src/cmd/compile/internal/types2/types_test.go
+++ b/src/cmd/compile/internal/types2/types_test.go
@@ -4,14 +4,9 @@
package types2
-import "sync/atomic"
-
func init() {
acceptMethodTypeParams = true
}
-// Upon calling ResetId, nextId starts with 1 again.
-// It may be called concurrently. This is only needed
-// for tests where we may want to have a consistent
-// numbering for each individual test case.
-func ResetId() { atomic.StoreUint32(&lastId, 0) }
+// Debug is set if types2 is built with debug mode enabled.
+const Debug = debug
diff --git a/src/cmd/compile/internal/types2/typeset.go b/src/cmd/compile/internal/types2/typeset.go
new file mode 100644
index 0000000000..14596b68a3
--- /dev/null
+++ b/src/cmd/compile/internal/types2/typeset.go
@@ -0,0 +1,392 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+import (
+ "bytes"
+ "cmd/compile/internal/syntax"
+ "fmt"
+ "sort"
+)
+
+// ----------------------------------------------------------------------------
+// API
+
+// A _TypeSet represents the type set of an interface.
+type _TypeSet struct {
+ comparable bool // if set, the interface is or embeds comparable
+ // TODO(gri) consider using a set for the methods for faster lookup
+ methods []*Func // all methods of the interface; sorted by unique ID
+ terms termlist // type terms of the type set
+}
+
+// IsEmpty reports whether type set s is the empty set.
+func (s *_TypeSet) IsEmpty() bool { return s.terms.isEmpty() }
+
+// IsAll reports whether type set s is the set of all types (corresponding to the empty interface).
+func (s *_TypeSet) IsAll() bool {
+ return !s.comparable && len(s.methods) == 0 && s.terms.isAll()
+}
+
+// TODO(gri) IsMethodSet is not a great name for this predicate. Find a better one.
+
+// IsMethodSet reports whether the type set s is described by a single set of methods.
+func (s *_TypeSet) IsMethodSet() bool { return !s.comparable && s.terms.isAll() }
+
+// IsComparable reports whether each type in the set is comparable.
+func (s *_TypeSet) IsComparable() bool {
+ if s.terms.isAll() {
+ return s.comparable
+ }
+ return s.is(func(t *term) bool {
+ return Comparable(t.typ)
+ })
+}
+
+// TODO(gri) IsTypeSet is not a great name for this predicate. Find a better one.
+
+// IsTypeSet reports whether the type set s is represented by a finite set of underlying types.
+func (s *_TypeSet) IsTypeSet() bool {
+ return !s.comparable && len(s.methods) == 0
+}
+
+// NumMethods returns the number of methods available.
+func (s *_TypeSet) NumMethods() int { return len(s.methods) }
+
+// Method returns the i'th method of type set s for 0 <= i < s.NumMethods().
+// The methods are ordered by their unique ID.
+func (s *_TypeSet) Method(i int) *Func { return s.methods[i] }
+
+// LookupMethod returns the index of, and the method with, matching package and name, or (-1, nil).
+func (s *_TypeSet) LookupMethod(pkg *Package, name string) (int, *Func) {
+ // TODO(gri) s.methods is sorted - consider binary search
+ return lookupMethod(s.methods, pkg, name)
+}
+
+func (s *_TypeSet) String() string {
+ switch {
+ case s.IsEmpty():
+ return "∅"
+ case s.IsAll():
+ return "𝓤"
+ }
+
+ hasMethods := len(s.methods) > 0
+ hasTerms := s.hasTerms()
+
+ var buf bytes.Buffer
+ buf.WriteByte('{')
+ if s.comparable {
+ buf.WriteString(" comparable")
+ if hasMethods || hasTerms {
+ buf.WriteByte(';')
+ }
+ }
+ for i, m := range s.methods {
+ if i > 0 {
+ buf.WriteByte(';')
+ }
+ buf.WriteByte(' ')
+ buf.WriteString(m.String())
+ }
+ if hasMethods && hasTerms {
+ buf.WriteByte(';')
+ }
+ if hasTerms {
+ buf.WriteString(s.terms.String())
+ }
+ buf.WriteString(" }") // there was at least one method or term
+
+ return buf.String()
+}
+
+// ----------------------------------------------------------------------------
+// Implementation
+
+func (s *_TypeSet) hasTerms() bool { return !s.terms.isAll() }
+func (s *_TypeSet) structuralType() Type { return s.terms.structuralType() }
+func (s *_TypeSet) includes(t Type) bool { return s.terms.includes(t) }
+func (s1 *_TypeSet) subsetOf(s2 *_TypeSet) bool { return s1.terms.subsetOf(s2.terms) }
+
+// TODO(gri) TypeSet.is and TypeSet.underIs should probably also go into termlist.go
+
+var topTerm = term{false, theTop}
+
+func (s *_TypeSet) is(f func(*term) bool) bool {
+ if len(s.terms) == 0 {
+ return false
+ }
+ for _, t := range s.terms {
+ // Terms represent the top term with a nil type.
+ // The rest of the type checker uses the top type
+ // instead. Convert.
+ // TODO(gri) investigate if we can do without this
+ if t.typ == nil {
+ t = &topTerm
+ }
+ if !f(t) {
+ return false
+ }
+ }
+ return true
+}
+
+func (s *_TypeSet) underIs(f func(Type) bool) bool {
+ if len(s.terms) == 0 {
+ return false
+ }
+ for _, t := range s.terms {
+ // see corresponding comment in TypeSet.is
+ u := t.typ
+ if u == nil {
+ u = theTop
+ }
+ // t == under(t) for ~t terms
+ if !t.tilde {
+ u = under(u)
+ }
+ if debug {
+ assert(Identical(u, under(u)))
+ }
+ if !f(u) {
+ return false
+ }
+ }
+ return true
+}
+
+// topTypeSet may be used as type set for the empty interface.
+var topTypeSet = _TypeSet{terms: allTermlist}
+
+// computeInterfaceTypeSet may be called with check == nil.
+func computeInterfaceTypeSet(check *Checker, pos syntax.Pos, ityp *Interface) *_TypeSet {
+ if ityp.tset != nil {
+ return ityp.tset
+ }
+
+ // If the interface is not fully set up yet, the type set will
+ // not be complete, which may lead to errors when using the
+ // type set (e.g. missing method). Don't compute a partial type
+ // set (and don't store it!), so that we still compute the full
+ // type set eventually. Instead, return the top type set and
+ // let any follow-on errors play out.
+ if !ityp.complete {
+ return &topTypeSet
+ }
+
+ if check != nil && check.conf.Trace {
+ // Types don't generally have position information.
+ // If we don't have a valid pos provided, try to use
+ // one close enough.
+ if !pos.IsKnown() && len(ityp.methods) > 0 {
+ pos = ityp.methods[0].pos
+ }
+
+ check.trace(pos, "type set for %s", ityp)
+ check.indent++
+ defer func() {
+ check.indent--
+ check.trace(pos, "=> %s ", ityp.typeSet())
+ }()
+ }
+
+ // An infinitely expanding interface (due to a cycle) is detected
+ // elsewhere (Checker.validType), so here we simply assume we only
+ // have valid interfaces. Mark the interface as complete to avoid
+ // infinite recursion if the validType check occurs later for some
+ // reason.
+ ityp.tset = &_TypeSet{terms: allTermlist} // TODO(gri) is this sufficient?
+
+ // Methods of embedded interfaces are collected unchanged; i.e., the identity
+ // of a method I.m's Func Object of an interface I is the same as that of
+ // the method m in an interface that embeds interface I. On the other hand,
+ // if a method is embedded via multiple overlapping embedded interfaces, we
+ // don't provide a guarantee which "original m" got chosen for the embedding
+ // interface. See also issue #34421.
+ //
+ // If we don't care to provide this identity guarantee anymore, instead of
+ // reusing the original method in embeddings, we can clone the method's Func
+ // Object and give it the position of a corresponding embedded interface. Then
+ // we can get rid of the mpos map below and simply use the cloned method's
+ // position.
+
+ var todo []*Func
+ var seen objset
+ var methods []*Func
+ mpos := make(map[*Func]syntax.Pos) // method specification or method embedding position, for good error messages
+ addMethod := func(pos syntax.Pos, m *Func, explicit bool) {
+ switch other := seen.insert(m); {
+ case other == nil:
+ methods = append(methods, m)
+ mpos[m] = pos
+ case explicit:
+ if check == nil {
+ panic(fmt.Sprintf("%s: duplicate method %s", m.pos, m.name))
+ }
+ // check != nil
+ var err error_
+ err.errorf(pos, "duplicate method %s", m.name)
+ err.errorf(mpos[other.(*Func)], "other declaration of %s", m.name)
+ check.report(&err)
+ default:
+ // We have a duplicate method name in an embedded (not explicitly declared) method.
+ // Check method signatures after all types are computed (issue #33656).
+ // If we're pre-go1.14 (overlapping embeddings are not permitted), report that
+ // error here as well (even though we could do it eagerly) because it's the same
+ // error message.
+ if check == nil {
+ // check method signatures after all locally embedded interfaces are computed
+ todo = append(todo, m, other.(*Func))
+ break
+ }
+ // check != nil
+ check.later(func() {
+ if !check.allowVersion(m.pkg, 1, 14) || !Identical(m.typ, other.Type()) {
+ var err error_
+ err.errorf(pos, "duplicate method %s", m.name)
+ err.errorf(mpos[other.(*Func)], "other declaration of %s", m.name)
+ check.report(&err)
+ }
+ })
+ }
+ }
+
+ for _, m := range ityp.methods {
+ addMethod(m.pos, m, true)
+ }
+
+ // collect embedded elements
+ var allTerms = allTermlist
+ for i, typ := range ityp.embeddeds {
+ // The embedding position is nil for imported interfaces
+ // and also for interface copies after substitution (but
+ // in that case we don't need to report errors again).
+ var pos syntax.Pos // embedding position
+ if ityp.embedPos != nil {
+ pos = (*ityp.embedPos)[i]
+ }
+ var terms termlist
+ switch u := under(typ).(type) {
+ case *Interface:
+ tset := computeInterfaceTypeSet(check, pos, u)
+ if tset.comparable {
+ ityp.tset.comparable = true
+ }
+ for _, m := range tset.methods {
+ addMethod(pos, m, false) // use embedding position pos rather than m.pos
+ }
+ terms = tset.terms
+ case *Union:
+ tset := computeUnionTypeSet(check, pos, u)
+ if tset == &invalidTypeSet {
+ continue // ignore invalid unions
+ }
+ terms = tset.terms
+ case *TypeParam:
+ // Embedding stand-alone type parameters is not permitted.
+ // This case is handled during union parsing.
+ unreachable()
+ default:
+ if typ == Typ[Invalid] {
+ continue
+ }
+ if check != nil && !check.allowVersion(check.pkg, 1, 18) {
+ check.errorf(pos, "%s is not an interface", typ)
+ continue
+ }
+ terms = termlist{{false, typ}}
+ }
+ // The type set of an interface is the intersection
+ // of the type sets of all its elements.
+ // Intersection cannot produce longer termlists and
+ // thus cannot overflow.
+ allTerms = allTerms.intersect(terms)
+ }
+ ityp.embedPos = nil // not needed anymore (errors have been reported)
+
+ // process todo's (this only happens if check == nil)
+ for i := 0; i < len(todo); i += 2 {
+ m := todo[i]
+ other := todo[i+1]
+ if !Identical(m.typ, other.typ) {
+ panic(fmt.Sprintf("%s: duplicate method %s", m.pos, m.name))
+ }
+ }
+
+ if methods != nil {
+ sortMethods(methods)
+ ityp.tset.methods = methods
+ }
+ ityp.tset.terms = allTerms
+
+ return ityp.tset
+}
+
+func sortMethods(list []*Func) {
+ sort.Sort(byUniqueMethodName(list))
+}
+
+func assertSortedMethods(list []*Func) {
+ if !debug {
+ panic("assertSortedMethods called outside debug mode")
+ }
+ if !sort.IsSorted(byUniqueMethodName(list)) {
+ panic("methods not sorted")
+ }
+}
+
+// byUniqueMethodName method lists can be sorted by their unique method names.
+type byUniqueMethodName []*Func
+
+func (a byUniqueMethodName) Len() int { return len(a) }
+func (a byUniqueMethodName) Less(i, j int) bool { return a[i].less(&a[j].object) }
+func (a byUniqueMethodName) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+
+// invalidTypeSet is a singleton type set to signal an invalid type set
+// due to an error. It's also a valid empty type set, so consumers of
+// type sets may choose to ignore it.
+var invalidTypeSet _TypeSet
+
+// computeUnionTypeSet may be called with check == nil.
+// The result is &invalidTypeSet if the union overflows.
+func computeUnionTypeSet(check *Checker, pos syntax.Pos, utyp *Union) *_TypeSet {
+ if utyp.tset != nil {
+ return utyp.tset
+ }
+
+ // avoid infinite recursion (see also computeInterfaceTypeSet)
+ utyp.tset = new(_TypeSet)
+
+ var allTerms termlist
+ for _, t := range utyp.terms {
+ var terms termlist
+ switch u := under(t.typ).(type) {
+ case *Interface:
+ terms = computeInterfaceTypeSet(check, pos, u).terms
+ case *TypeParam:
+ // A stand-alone type parameter is not permitted as a union term.
+ // This case is handled during union parsing.
+ unreachable()
+ default:
+ if t.typ == Typ[Invalid] {
+ continue
+ }
+ terms = termlist{(*term)(t)}
+ }
+ // The type set of a union expression is the union
+ // of the type sets of each term.
+ allTerms = allTerms.union(terms)
+ if len(allTerms) > maxTermCount {
+ if check != nil {
+ check.errorf(pos, "cannot handle more than %d union terms (implementation limitation)", maxTermCount)
+ }
+ utyp.tset = &invalidTypeSet
+ return utyp.tset
+ }
+ }
+ utyp.tset.terms = allTerms
+
+ return utyp.tset
+}
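For intuition about the set algebra implemented above (union of terms for |, intersection across embedded interface elements), a hedged source-level example; the type names are illustrative and Go 1.18-style syntax is assumed.

	package example

	import "fmt"

	type Signed interface{ ~int | ~int32 | ~int64 }

	// Union: the type set of Ordered is the union of Signed's terms and ~string.
	type Ordered interface{ Signed | ~string }

	// Embedding: the type set of SignedStringer is the intersection of
	// Signed's type set with the set of types that implement fmt.Stringer.
	type SignedStringer interface {
		Signed
		fmt.Stringer
	}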
diff --git a/src/cmd/compile/internal/types2/typeset_test.go b/src/cmd/compile/internal/types2/typeset_test.go
new file mode 100644
index 0000000000..0e14d523c8
--- /dev/null
+++ b/src/cmd/compile/internal/types2/typeset_test.go
@@ -0,0 +1,15 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+import "testing"
+
+func TestInvalidTypeSet(t *testing.T) {
+ if !invalidTypeSet.IsEmpty() {
+ t.Error("invalidTypeSet is not empty")
+ }
+}
+
+// TODO(gri) add more tests
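In the same spirit, one more sanity check that could accompany the test above (a sketch only; it assumes, as the name suggests, that allTermlist denotes the set of all types):

	func TestTopTypeSet(t *testing.T) {
		if !topTypeSet.IsAll() {
			t.Error("topTypeSet does not contain all types")
		}
		if topTypeSet.IsEmpty() {
			t.Error("topTypeSet is empty")
		}
	}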
diff --git a/src/cmd/compile/internal/types2/typestring.go b/src/cmd/compile/internal/types2/typestring.go
index 40016697b7..2c34d036db 100644
--- a/src/cmd/compile/internal/types2/typestring.go
+++ b/src/cmd/compile/internal/types2/typestring.go
@@ -39,27 +39,6 @@ func RelativeTo(pkg *Package) Qualifier {
}
}
-// If gcCompatibilityMode is set, printing of types is modified
-// to match the representation of some types in the gc compiler:
-//
-// - byte and rune lose their alias name and simply stand for
-// uint8 and int32 respectively
-// - embedded interfaces get flattened (the embedding info is lost,
-// and certain recursive interface types cannot be printed anymore)
-//
-// This makes it easier to compare packages computed with the type-
-// checker vs packages imported from gc export data.
-//
-// Caution: This flag affects all uses of WriteType, globally.
-// It is only provided for testing in conjunction with
-// gc-generated data.
-//
-// This flag is exported in the x/tools/go/types package. We don't
-// need it at the moment in the std repo and so we don't export it
-// anymore. We should eventually try to remove it altogether.
-// TODO(gri) remove this
-var gcCompatibilityMode bool
-
// TypeString returns the string representation of typ.
// The Qualifier controls the printing of
// package-level objects, and may be nil.
@@ -106,16 +85,6 @@ func writeType(buf *bytes.Buffer, typ Type, qf Qualifier, visited []Type) {
break
}
}
-
- if gcCompatibilityMode {
- // forget the alias names
- switch t.kind {
- case Byte:
- t = Typ[Uint8]
- case Rune:
- t = Typ[Int32]
- }
- }
buf.WriteString(t.name)
case *Array:
@@ -157,80 +126,39 @@ func writeType(buf *bytes.Buffer, typ Type, qf Qualifier, visited []Type) {
buf.WriteString("func")
writeSignature(buf, t, qf, visited)
- case *Sum:
- for i, t := range t.types {
+ case *Union:
+ // Unions only appear as (syntactic) embedded elements
+ // in interfaces and syntactically cannot be empty.
+ if t.Len() == 0 {
+ panic("empty union")
+ }
+ for i, t := range t.terms {
if i > 0 {
- buf.WriteString(", ")
+ buf.WriteByte('|')
}
- writeType(buf, t, qf, visited)
+ if t.tilde {
+ buf.WriteByte('~')
+ }
+ writeType(buf, t.typ, qf, visited)
}
case *Interface:
- // We write the source-level methods and embedded types rather
- // than the actual method set since resolved method signatures
- // may have non-printable cycles if parameters have embedded
- // interface types that (directly or indirectly) embed the
- // current interface. For instance, consider the result type
- // of m:
- //
- // type T interface{
- // m() interface{ T }
- // }
- //
buf.WriteString("interface{")
- empty := true
- if gcCompatibilityMode {
- // print flattened interface
- // (useful to compare against gc-generated interfaces)
- for i, m := range t.allMethods {
- if i > 0 {
- buf.WriteString("; ")
- }
- buf.WriteString(m.name)
- writeSignature(buf, m.typ.(*Signature), qf, visited)
- empty = false
- }
- if !empty && t.allTypes != nil {
- buf.WriteString("; ")
- }
- if t.allTypes != nil {
- buf.WriteString("type ")
- writeType(buf, t.allTypes, qf, visited)
- }
- } else {
- // print explicit interface methods and embedded types
- for i, m := range t.methods {
- if i > 0 {
- buf.WriteString("; ")
- }
- buf.WriteString(m.name)
- writeSignature(buf, m.typ.(*Signature), qf, visited)
- empty = false
- }
- if !empty && t.types != nil {
+ first := true
+ for _, m := range t.methods {
+ if !first {
buf.WriteString("; ")
}
- if t.types != nil {
- buf.WriteString("type ")
- writeType(buf, t.types, qf, visited)
- empty = false
- }
- if !empty && len(t.embeddeds) > 0 {
- buf.WriteString("; ")
- }
- for i, typ := range t.embeddeds {
- if i > 0 {
- buf.WriteString("; ")
- }
- writeType(buf, typ, qf, visited)
- empty = false
- }
+ first = false
+ buf.WriteString(m.name)
+ writeSignature(buf, m.typ.(*Signature), qf, visited)
}
- if t.allMethods == nil || len(t.methods) > len(t.allMethods) {
- if !empty {
- buf.WriteByte(' ')
+ for _, typ := range t.embeddeds {
+ if !first {
+ buf.WriteString("; ")
}
- buf.WriteString("/* incomplete */")
+ first = false
+ writeType(buf, typ, qf, visited)
}
buf.WriteByte('}')
@@ -255,7 +183,7 @@ func writeType(buf *bytes.Buffer, typ Type, qf Qualifier, visited []Type) {
case RecvOnly:
s = "<-chan "
default:
- panic("unreachable")
+ unreachable()
}
buf.WriteString(s)
if parens {
@@ -267,39 +195,40 @@ func writeType(buf *bytes.Buffer, typ Type, qf Qualifier, visited []Type) {
}
case *Named:
+ if t.instance != nil {
+ buf.WriteByte(instanceMarker)
+ }
writeTypeName(buf, t.obj, qf)
if t.targs != nil {
// instantiated type
buf.WriteByte('[')
- writeTypeList(buf, t.targs, qf, visited)
+ writeTypeList(buf, t.targs.list(), qf, visited)
buf.WriteByte(']')
- } else if t.tparams != nil {
+ } else if t.TParams().Len() != 0 {
// parameterized type
- writeTParamList(buf, t.tparams, qf, visited)
+ writeTParamList(buf, t.TParams().list(), qf, visited)
}
case *TypeParam:
s := "?"
if t.obj != nil {
+ // Optionally write out package for typeparams (like Named).
+ // TODO(danscales): this is required for import/export, so
+ // we may need a separate function that won't be changed
+ // for debugging purposes.
+ if t.obj.pkg != nil {
+ writePackage(buf, t.obj.pkg, qf)
+ }
s = t.obj.name
}
buf.WriteString(s + subscript(t.id))
- case *instance:
- buf.WriteByte(instanceMarker) // indicate "non-evaluated" syntactic instance
- writeTypeName(buf, t.base.obj, qf)
- buf.WriteByte('[')
- writeTypeList(buf, t.targs, qf, visited)
- buf.WriteByte(']')
-
- case *bottom:
- buf.WriteString("⊥")
-
case *top:
buf.WriteString("⊤")
default:
// For externally defined implementations of Type.
+ // Note: In this case cycles won't be caught.
buf.WriteString(t.String())
}
}
@@ -313,29 +242,32 @@ func writeTypeList(buf *bytes.Buffer, list []Type, qf Qualifier, visited []Type)
}
}
-func writeTParamList(buf *bytes.Buffer, list []*TypeName, qf Qualifier, visited []Type) {
+func writeTParamList(buf *bytes.Buffer, list []*TypeParam, qf Qualifier, visited []Type) {
buf.WriteString("[")
var prev Type
- for i, p := range list {
- // TODO(gri) support 'any' sugar here.
- var b Type = &emptyInterface
- if t, _ := p.typ.(*TypeParam); t != nil && t.bound != nil {
- b = t.bound
+ for i, tpar := range list {
+ // Determine the type parameter and its constraint.
+ // list is expected to hold type parameters,
+ // but don't crash if that's not the case.
+ var bound Type
+ if tpar != nil {
+ bound = tpar.bound // should not be nil but we want to see it if it is
}
+
if i > 0 {
- if b != prev {
- // type bound changed - write previous one before advancing
+ if bound != prev {
+ // bound changed - write previous one before advancing
buf.WriteByte(' ')
writeType(buf, prev, qf, visited)
}
buf.WriteString(", ")
}
- prev = b
+ prev = bound
- if t, _ := p.typ.(*TypeParam); t != nil {
- writeType(buf, t, qf, visited)
+ if tpar != nil {
+ writeType(buf, tpar, qf, visited)
} else {
- buf.WriteString(p.name)
+ buf.WriteString("?") // tpar == nil in this branch; match the "?" fallback used for *TypeParam above
}
}
if prev != nil {
@@ -346,17 +278,38 @@ func writeTParamList(buf *bytes.Buffer, list []*TypeName, qf Qualifier, visited
}
func writeTypeName(buf *bytes.Buffer, obj *TypeName, qf Qualifier) {
- s := "<Named w/o object>"
- if obj != nil {
- if obj.pkg != nil {
- writePackage(buf, obj.pkg, qf)
+ if obj == nil {
+ buf.WriteString("<Named w/o object>")
+ return
+ }
+ if obj.pkg != nil {
+ writePackage(buf, obj.pkg, qf)
+ }
+ buf.WriteString(obj.name)
+
+ if instanceHashing != 0 {
+ // For locally defined types, use the (original!) TypeName's scope
+ // numbers to disambiguate.
+ typ := obj.typ.(*Named)
+ // TODO(gri) Figure out why typ.orig != typ.orig.orig sometimes
+ // and whether the loop can iterate more than twice.
+ // (It seems somehow connected to instance types.)
+ for typ.orig != typ {
+ typ = typ.orig
}
- // TODO(gri): function-local named types should be displayed
- // differently from named types at package level to avoid
- // ambiguity.
- s = obj.name
+ writeScopeNumbers(buf, typ.obj.parent)
+ }
+}
+
+// writeScopeNumbers writes the number sequence for this scope to buf
+// in the form ".i.j.k" where i, j, k, etc. stand for scope numbers.
+// If a scope is nil or has no parent (such as a package scope), nothing
+// is written.
+func writeScopeNumbers(buf *bytes.Buffer, s *Scope) {
+ if s != nil && s.number > 0 {
+ writeScopeNumbers(buf, s.parent)
+ fmt.Fprintf(buf, ".%d", s.number)
}
- buf.WriteString(s)
}
func writeTuple(buf *bytes.Buffer, tup *Tuple, variadic bool, qf Qualifier, visited []Type) {
@@ -379,7 +332,7 @@ func writeTuple(buf *bytes.Buffer, tup *Tuple, variadic bool, qf Qualifier, visi
// special case:
// append(s, "foo"...) leads to signature func([]byte, string...)
if t := asBasic(typ); t == nil || t.kind != String {
- panic("internal error: string type expected")
+ panic("expected string type")
}
writeType(buf, typ, qf, visited)
buf.WriteString("...")
@@ -401,8 +354,8 @@ func WriteSignature(buf *bytes.Buffer, sig *Signature, qf Qualifier) {
}
func writeSignature(buf *bytes.Buffer, sig *Signature, qf Qualifier, visited []Type) {
- if sig.tparams != nil {
- writeTParamList(buf, sig.tparams, qf, visited)
+ if sig.TParams().Len() != 0 {
+ writeTParamList(buf, sig.TParams().list(), qf, visited)
}
writeTuple(buf, sig.params, sig.variadic, qf, visited)
diff --git a/src/cmd/compile/internal/types2/typestring_test.go b/src/cmd/compile/internal/types2/typestring_test.go
index d98e9a5ade..0ed2934961 100644
--- a/src/cmd/compile/internal/types2/typestring_test.go
+++ b/src/cmd/compile/internal/types2/typestring_test.go
@@ -91,7 +91,8 @@ var independentTestTypes = []testEntry{
dup("interface{}"),
dup("interface{m()}"),
dup(`interface{String() string; m(int) float32}`),
- dup(`interface{type int, float32, complex128}`),
+ dup("interface{int|float32|complex128}"),
+ dup("interface{int|~float32|~complex128}"),
// maps
dup("map[string]int"),
@@ -135,60 +136,6 @@ func TestTypeString(t *testing.T) {
}
}
-var nopos syntax.Pos
-
-func TestIncompleteInterfaces(t *testing.T) {
- sig := NewSignature(nil, nil, nil, false)
- m := NewFunc(nopos, nil, "m", sig)
- for _, test := range []struct {
- typ *Interface
- want string
- }{
- {new(Interface), "interface{/* incomplete */}"},
- {new(Interface).Complete(), "interface{}"},
-
- {NewInterface(nil, nil), "interface{}"},
- {NewInterface(nil, nil).Complete(), "interface{}"},
- {NewInterface([]*Func{}, nil), "interface{}"},
- {NewInterface([]*Func{}, nil).Complete(), "interface{}"},
- {NewInterface(nil, []*Named{}), "interface{}"},
- {NewInterface(nil, []*Named{}).Complete(), "interface{}"},
- {NewInterface([]*Func{m}, nil), "interface{m() /* incomplete */}"},
- {NewInterface([]*Func{m}, nil).Complete(), "interface{m()}"},
- {NewInterface(nil, []*Named{newDefined(new(Interface).Complete())}), "interface{T /* incomplete */}"},
- {NewInterface(nil, []*Named{newDefined(new(Interface).Complete())}).Complete(), "interface{T}"},
- {NewInterface(nil, []*Named{newDefined(NewInterface([]*Func{m}, nil))}), "interface{T /* incomplete */}"},
- {NewInterface(nil, []*Named{newDefined(NewInterface([]*Func{m}, nil).Complete())}), "interface{T /* incomplete */}"},
- {NewInterface(nil, []*Named{newDefined(NewInterface([]*Func{m}, nil).Complete())}).Complete(), "interface{T}"},
-
- {NewInterfaceType(nil, nil), "interface{}"},
- {NewInterfaceType(nil, nil).Complete(), "interface{}"},
- {NewInterfaceType([]*Func{}, nil), "interface{}"},
- {NewInterfaceType([]*Func{}, nil).Complete(), "interface{}"},
- {NewInterfaceType(nil, []Type{}), "interface{}"},
- {NewInterfaceType(nil, []Type{}).Complete(), "interface{}"},
- {NewInterfaceType([]*Func{m}, nil), "interface{m() /* incomplete */}"},
- {NewInterfaceType([]*Func{m}, nil).Complete(), "interface{m()}"},
- {NewInterfaceType(nil, []Type{new(Interface).Complete()}), "interface{interface{} /* incomplete */}"},
- {NewInterfaceType(nil, []Type{new(Interface).Complete()}).Complete(), "interface{interface{}}"},
- {NewInterfaceType(nil, []Type{NewInterfaceType([]*Func{m}, nil)}), "interface{interface{m() /* incomplete */} /* incomplete */}"},
- {NewInterfaceType(nil, []Type{NewInterfaceType([]*Func{m}, nil).Complete()}), "interface{interface{m()} /* incomplete */}"},
- {NewInterfaceType(nil, []Type{NewInterfaceType([]*Func{m}, nil).Complete()}).Complete(), "interface{interface{m()}}"},
- } {
- got := test.typ.String()
- if got != test.want {
- t.Errorf("got: %s, want: %s", got, test.want)
- }
- }
-}
-
-// newDefined creates a new defined type named T with the given underlying type.
-// Helper function for use with TestIncompleteInterfaces only.
-func newDefined(underlying Type) *Named {
- tname := NewTypeName(nopos, nil, "T", nil)
- return NewNamed(tname, underlying, nil)
-}
-
func TestQualifiedTypeString(t *testing.T) {
p, _ := pkgFor("p.go", "package p; type T int", nil)
q, _ := pkgFor("q.go", "package q", nil)
diff --git a/src/cmd/compile/internal/types2/typeterm.go b/src/cmd/compile/internal/types2/typeterm.go
new file mode 100644
index 0000000000..1d7223f13c
--- /dev/null
+++ b/src/cmd/compile/internal/types2/typeterm.go
@@ -0,0 +1,166 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+// A term describes elementary type sets:
+//
+// ∅: (*term)(nil) == ∅ // set of no types (empty set)
+// 𝓤: &term{} == 𝓤 // set of all types (𝓤niverse)
+// T: &term{false, T} == {T} // set of type T
+// ~t: &term{true, t} == {t' | under(t') == t} // set of types with underlying type t
+//
+type term struct {
+ tilde bool // valid if typ != nil
+ typ Type
+}
+
+func (x *term) String() string {
+ switch {
+ case x == nil:
+ return "∅"
+ case x.typ == nil:
+ return "𝓤"
+ case x.tilde:
+ return "~" + x.typ.String()
+ default:
+ return x.typ.String()
+ }
+}
+
+// equal reports whether x and y represent the same type set.
+func (x *term) equal(y *term) bool {
+ // easy cases
+ switch {
+ case x == nil || y == nil:
+ return x == y
+ case x.typ == nil || y.typ == nil:
+ return x.typ == y.typ
+ }
+ // ∅ ⊂ x, y ⊂ 𝓤
+
+ return x.tilde == y.tilde && Identical(x.typ, y.typ)
+}
+
+// union returns the union x ∪ y: zero, one, or two non-nil terms.
+func (x *term) union(y *term) (_, _ *term) {
+ // easy cases
+ switch {
+ case x == nil && y == nil:
+ return nil, nil // ∅ ∪ ∅ == ∅
+ case x == nil:
+ return y, nil // ∅ ∪ y == y
+ case y == nil:
+ return x, nil // x ∪ ∅ == x
+ case x.typ == nil:
+ return x, nil // 𝓤 ∪ y == 𝓤
+ case y.typ == nil:
+ return y, nil // x ∪ 𝓤 == 𝓤
+ }
+ // ∅ ⊂ x, y ⊂ 𝓤
+
+ if x.disjoint(y) {
+ return x, y // x ∪ y == (x, y) if x ∩ y == ∅
+ }
+ // x.typ == y.typ
+
+ // ~t ∪ ~t == ~t
+ // ~t ∪ T == ~t
+ // T ∪ ~t == ~t
+ // T ∪ T == T
+ if x.tilde || !y.tilde {
+ return x, nil
+ }
+ return y, nil
+}
+
+// intersect returns the intersection x ∩ y.
+func (x *term) intersect(y *term) *term {
+ // easy cases
+ switch {
+ case x == nil || y == nil:
+ return nil // ∅ ∩ y == ∅ and x ∩ ∅ == ∅
+ case x.typ == nil:
+ return y // 𝓤 ∩ y == y
+ case y.typ == nil:
+ return x // x ∩ 𝓤 == x
+ }
+ // ∅ ⊂ x, y ⊂ 𝓤
+
+ if x.disjoint(y) {
+ return nil // x ∩ y == ∅ if x ∩ y == ∅
+ }
+ // x.typ == y.typ
+
+ // ~t ∩ ~t == ~t
+ // ~t ∩ T == T
+ // T ∩ ~t == T
+ // T ∩ T == T
+ if !x.tilde || y.tilde {
+ return x
+ }
+ return y
+}
+
+// includes reports whether t ∈ x.
+func (x *term) includes(t Type) bool {
+ // easy cases
+ switch {
+ case x == nil:
+ return false // t ∈ ∅ == false
+ case x.typ == nil:
+ return true // t ∈ 𝓤 == true
+ }
+ // ∅ ⊂ x ⊂ 𝓤
+
+ u := t
+ if x.tilde {
+ u = under(u)
+ }
+ return Identical(x.typ, u)
+}
+
+// subsetOf reports whether x ⊆ y.
+func (x *term) subsetOf(y *term) bool {
+ // easy cases
+ switch {
+ case x == nil:
+ return true // ∅ ⊆ y == true
+ case y == nil:
+ return false // x ⊆ ∅ == false since x != ∅
+ case y.typ == nil:
+ return true // x ⊆ 𝓤 == true
+ case x.typ == nil:
+ return false // 𝓤 ⊆ y == false since y != 𝓤
+ }
+ // ∅ ⊂ x, y ⊂ 𝓤
+
+ if x.disjoint(y) {
+ return false // x ⊆ y == false if x ∩ y == ∅
+ }
+ // x.typ == y.typ
+
+ // ~t ⊆ ~t == true
+ // ~t ⊆ T == false
+ // T ⊆ ~t == true
+ // T ⊆ T == true
+ return !x.tilde || y.tilde
+}
+
+// disjoint reports whether x ∩ y == ∅.
+// x.typ and y.typ must not be nil.
+func (x *term) disjoint(y *term) bool {
+ if debug && (x.typ == nil || y.typ == nil) {
+ panic("invalid argument(s)")
+ }
+ ux := x.typ
+ if y.tilde {
+ ux = under(ux)
+ }
+ uy := y.typ
+ if x.tilde {
+ uy = under(uy)
+ }
+ return !Identical(ux, uy)
+}
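
The term algebra introduced above is small enough to model with plain type names. A toy approximation of the exact-vs-tilde distinction in includes (string names and a hand-written under map, not the real Type/Identical machinery):

    package main

    import "fmt"

    // mini is a toy stand-in for the unexported term type above: typ is a
    // type name, and the under map plays the role of under(t).
    type mini struct {
        tilde bool
        typ   string
    }

    // under maps (hypothetical) defined type names to their underlying type.
    var under = map[string]string{"int": "int", "myInt": "int", "string": "string"}

    // includes mirrors term.includes: a ~t term matches every type whose
    // underlying type is t, while a plain T term matches only T itself.
    func (x mini) includes(t string) bool {
        if x.tilde {
            return under[t] == x.typ
        }
        return t == x.typ
    }

    func main() {
        fmt.Println(mini{false, "int"}.includes("myInt")) // false: int matches only int
        fmt.Println(mini{true, "int"}.includes("myInt"))  // true: under(myInt) == int
        fmt.Println(mini{true, "int"}.includes("string")) // false
    }
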
diff --git a/src/cmd/compile/internal/types2/typeterm_test.go b/src/cmd/compile/internal/types2/typeterm_test.go
new file mode 100644
index 0000000000..5a5c1fa447
--- /dev/null
+++ b/src/cmd/compile/internal/types2/typeterm_test.go
@@ -0,0 +1,239 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+import (
+ "strings"
+ "testing"
+)
+
+var myInt = func() Type {
+ tname := NewTypeName(nopos, nil, "myInt", nil)
+ return NewNamed(tname, Typ[Int], nil)
+}()
+
+var testTerms = map[string]*term{
+ "∅": nil,
+ "𝓤": {},
+ "int": {false, Typ[Int]},
+ "~int": {true, Typ[Int]},
+ "string": {false, Typ[String]},
+ "~string": {true, Typ[String]},
+ "myInt": {false, myInt},
+}
+
+func TestTermString(t *testing.T) {
+ for want, x := range testTerms {
+ if got := x.String(); got != want {
+ t.Errorf("%v.String() == %v; want %v", x, got, want)
+ }
+ }
+}
+
+func split(s string, n int) []string {
+ r := strings.Split(s, " ")
+ if len(r) != n {
+ panic("invalid test case: " + s)
+ }
+ return r
+}
+
+func testTerm(name string) *term {
+ r, ok := testTerms[name]
+ if !ok {
+ panic("invalid test argument: " + name)
+ }
+ return r
+}
+
+func TestTermEqual(t *testing.T) {
+ for _, test := range []string{
+ "∅ ∅ T",
+ "𝓤 𝓤 T",
+ "int int T",
+ "~int ~int T",
+ "myInt myInt T",
+ "∅ 𝓤 F",
+ "∅ int F",
+ "∅ ~int F",
+ "𝓤 int F",
+ "𝓤 ~int F",
+ "𝓤 myInt F",
+ "int ~int F",
+ "int myInt F",
+ "~int myInt F",
+ } {
+ args := split(test, 3)
+ x := testTerm(args[0])
+ y := testTerm(args[1])
+ want := args[2] == "T"
+ if got := x.equal(y); got != want {
+ t.Errorf("%v.equal(%v) = %v; want %v", x, y, got, want)
+ }
+ // equal is symmetric
+ x, y = y, x
+ if got := x.equal(y); got != want {
+ t.Errorf("%v.equal(%v) = %v; want %v", x, y, got, want)
+ }
+ }
+}
+
+func TestTermUnion(t *testing.T) {
+ for _, test := range []string{
+ "∅ ∅ ∅ ∅",
+ "∅ 𝓤 𝓤 ∅",
+ "∅ int int ∅",
+ "∅ ~int ~int ∅",
+ "∅ myInt myInt ∅",
+ "𝓤 𝓤 𝓤 ∅",
+ "𝓤 int 𝓤 ∅",
+ "𝓤 ~int 𝓤 ∅",
+ "𝓤 myInt 𝓤 ∅",
+ "int int int ∅",
+ "int ~int ~int ∅",
+ "int string int string",
+ "int ~string int ~string",
+ "int myInt int myInt",
+ "~int ~string ~int ~string",
+ "~int myInt ~int ∅",
+
+ // union is symmetric, but the result order isn't - repeat symmetric cases explicitly
+ "𝓤 ∅ 𝓤 ∅",
+ "int ∅ int ∅",
+ "~int ∅ ~int ∅",
+ "myInt ∅ myInt ∅",
+ "int 𝓤 𝓤 ∅",
+ "~int 𝓤 𝓤 ∅",
+ "myInt 𝓤 𝓤 ∅",
+ "~int int ~int ∅",
+ "string int string int",
+ "~string int ~string int",
+ "myInt int myInt int",
+ "~string ~int ~string ~int",
+ "myInt ~int ~int ∅",
+ } {
+ args := split(test, 4)
+ x := testTerm(args[0])
+ y := testTerm(args[1])
+ want1 := testTerm(args[2])
+ want2 := testTerm(args[3])
+ if got1, got2 := x.union(y); !got1.equal(want1) || !got2.equal(want2) {
+ t.Errorf("%v.union(%v) = %v, %v; want %v, %v", x, y, got1, got2, want1, want2)
+ }
+ }
+}
+
+func TestTermIntersection(t *testing.T) {
+ for _, test := range []string{
+ "∅ ∅ ∅",
+ "∅ 𝓤 ∅",
+ "∅ int ∅",
+ "∅ ~int ∅",
+ "∅ myInt ∅",
+ "𝓤 𝓤 𝓤",
+ "𝓤 int int",
+ "𝓤 ~int ~int",
+ "𝓤 myInt myInt",
+ "int int int",
+ "int ~int int",
+ "int string ∅",
+ "int ~string ∅",
+ "int string ∅",
+ "~int ~string ∅",
+ "~int myInt myInt",
+ } {
+ args := split(test, 3)
+ x := testTerm(args[0])
+ y := testTerm(args[1])
+ want := testTerm(args[2])
+ if got := x.intersect(y); !got.equal(want) {
+ t.Errorf("%v.intersect(%v) = %v; want %v", x, y, got, want)
+ }
+ // intersect is symmetric
+ x, y = y, x
+ if got := x.intersect(y); !got.equal(want) {
+ t.Errorf("%v.intersect(%v) = %v; want %v", x, y, got, want)
+ }
+ }
+}
+
+func TestTermIncludes(t *testing.T) {
+ for _, test := range []string{
+ "∅ int F",
+ "𝓤 int T",
+ "int int T",
+ "~int int T",
+ "~int myInt T",
+ "string int F",
+ "~string int F",
+ "myInt int F",
+ } {
+ args := split(test, 3)
+ x := testTerm(args[0])
+ y := testTerm(args[1]).typ
+ want := args[2] == "T"
+ if got := x.includes(y); got != want {
+ t.Errorf("%v.includes(%v) = %v; want %v", x, y, got, want)
+ }
+ }
+}
+
+func TestTermSubsetOf(t *testing.T) {
+ for _, test := range []string{
+ "∅ ∅ T",
+ "𝓤 𝓤 T",
+ "int int T",
+ "~int ~int T",
+ "myInt myInt T",
+ "∅ 𝓤 T",
+ "∅ int T",
+ "∅ ~int T",
+ "∅ myInt T",
+ "𝓤 int F",
+ "𝓤 ~int F",
+ "𝓤 myInt F",
+ "int ~int T",
+ "int myInt F",
+ "~int myInt F",
+ "myInt int F",
+ "myInt ~int T",
+ } {
+ args := split(test, 3)
+ x := testTerm(args[0])
+ y := testTerm(args[1])
+ want := args[2] == "T"
+ if got := x.subsetOf(y); got != want {
+ t.Errorf("%v.subsetOf(%v) = %v; want %v", x, y, got, want)
+ }
+ }
+}
+
+func TestTermDisjoint(t *testing.T) {
+ for _, test := range []string{
+ "int int F",
+ "~int ~int F",
+ "int ~int F",
+ "int string T",
+ "int ~string T",
+ "int myInt T",
+ "~int ~string T",
+ "~int myInt F",
+ "string myInt T",
+ "~string myInt T",
+ } {
+ args := split(test, 3)
+ x := testTerm(args[0])
+ y := testTerm(args[1])
+ want := args[2] == "T"
+ if got := x.disjoint(y); got != want {
+ t.Errorf("%v.disjoint(%v) = %v; want %v", x, y, got, want)
+ }
+ // disjoint is symmetric
+ x, y = y, x
+ if got := x.disjoint(y); got != want {
+ t.Errorf("%v.disjoint(%v) = %v; want %v", x, y, got, want)
+ }
+ }
+}
diff --git a/src/cmd/compile/internal/types2/typexpr.go b/src/cmd/compile/internal/types2/typexpr.go
index e64d804c30..241c6d35fe 100644
--- a/src/cmd/compile/internal/types2/typexpr.go
+++ b/src/cmd/compile/internal/types2/typexpr.go
@@ -10,14 +10,9 @@ import (
"cmd/compile/internal/syntax"
"fmt"
"go/constant"
- "sort"
- "strconv"
"strings"
)
-// Disabled by default, but enabled when running tests (via types_test.go).
-var acceptMethodTypeParams bool
-
// ident type-checks identifier e and initializes x with the value or type of e.
// If an error occurred, x.mode is set to invalid.
// For the meaning of def, see Checker.definedType, below.
@@ -30,7 +25,8 @@ func (check *Checker) ident(x *operand, e *syntax.Name, def *Named, wantType boo
// Note that we cannot use check.lookup here because the returned scope
// may be different from obj.Parent(). See also Scope.LookupParent doc.
scope, obj := check.scope.LookupParent(e.Value, check.pos)
- if obj == nil {
+ switch obj {
+ case nil:
if e.Value == "_" {
check.error(e, "cannot use _ as value or type")
} else {
@@ -41,6 +37,16 @@ func (check *Checker) ident(x *operand, e *syntax.Name, def *Named, wantType boo
}
}
return
+ case universeAny, universeComparable:
+ if !check.allowVersion(check.pkg, 1, 18) {
+ check.errorf(e, "undeclared name: %s (requires version go1.18 or later)", e.Value)
+ return
+ }
+ // If we allow "any" for general use, this if-statement can be removed (issue #33232).
+ if obj == universeAny {
+ check.error(e, "cannot use any outside constraint position")
+ return
+ }
}
check.recordUse(e, obj)
@@ -63,7 +69,7 @@ func (check *Checker) ident(x *operand, e *syntax.Name, def *Named, wantType boo
// If so, mark the respective package as used.
// (This code is only needed for dot-imports. Without them,
// we only have to mark variables, see *Var case below).
- if pkgName := check.dotImportMap[dotImportKey{scope, obj}]; pkgName != nil {
+ if pkgName := check.dotImportMap[dotImportKey{scope, obj.Name()}]; pkgName != nil {
pkgName.used = true
}
@@ -141,18 +147,18 @@ func (check *Checker) varType(e syntax.Expr) Type {
// ordinaryType reports an error if typ is an interface type containing
// type lists or is (or embeds) the predeclared type comparable.
func (check *Checker) ordinaryType(pos syntax.Pos, typ Type) {
- // We don't want to call under() (via Interface) or complete interfaces while we
+ // We don't want to call under() (via asInterface) or complete interfaces while we
// are in the middle of type-checking parameter declarations that might belong to
// interface methods. Delay this check to the end of type-checking.
check.later(func() {
if t := asInterface(typ); t != nil {
- check.completeInterface(pos, t) // TODO(gri) is this the correct position?
- if t.allTypes != nil {
- check.softErrorf(pos, "interface contains type constraints (%s)", t.allTypes)
- return
- }
- if t.IsComparable() {
- check.softErrorf(pos, "interface is (or embeds) comparable")
+ tset := computeInterfaceTypeSet(check, pos, t) // TODO(gri) is this the correct position?
+ if !tset.IsMethodSet() {
+ if tset.comparable {
+ check.softErrorf(pos, "interface is (or embeds) comparable")
+ } else {
+ check.softErrorf(pos, "interface contains type constraints")
+ }
}
}
})
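
In source terms, the rewritten ordinaryType check fires for declarations like the following (illustrative only; the quoted messages come from the softErrorf calls above):

    package p

    // Constraint has type-set terms, so it is not a method set.
    type Constraint interface {
        ~int | ~string
    }

    // Using it as an ordinary (variable) type is reported as
    // "interface contains type constraints":
    // var x Constraint

    // Likewise, a type that is or embeds comparable is reported as
    // "interface is (or embeds) comparable":
    // var y comparable
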
@@ -198,238 +204,6 @@ func (check *Checker) genericType(e syntax.Expr, reportErr bool) Type {
return typ
}
-// isubst returns an x with identifiers substituted per the substitution map smap.
-// isubst only handles the case of (valid) method receiver type expressions correctly.
-func isubst(x syntax.Expr, smap map[*syntax.Name]*syntax.Name) syntax.Expr {
- switch n := x.(type) {
- case *syntax.Name:
- if alt := smap[n]; alt != nil {
- return alt
- }
- // case *syntax.StarExpr:
- // X := isubst(n.X, smap)
- // if X != n.X {
- // new := *n
- // new.X = X
- // return &new
- // }
- case *syntax.Operation:
- if n.Op == syntax.Mul && n.Y == nil {
- X := isubst(n.X, smap)
- if X != n.X {
- new := *n
- new.X = X
- return &new
- }
- }
- case *syntax.IndexExpr:
- Index := isubst(n.Index, smap)
- if Index != n.Index {
- new := *n
- new.Index = Index
- return &new
- }
- case *syntax.ListExpr:
- var elems []syntax.Expr
- for i, elem := range n.ElemList {
- new := isubst(elem, smap)
- if new != elem {
- if elems == nil {
- elems = make([]syntax.Expr, len(n.ElemList))
- copy(elems, n.ElemList)
- }
- elems[i] = new
- }
- }
- if elems != nil {
- new := *n
- new.ElemList = elems
- return &new
- }
- case *syntax.ParenExpr:
- return isubst(n.X, smap) // no need to keep parentheses
- default:
- // Other receiver type expressions are invalid.
- // It's fine to ignore those here as they will
- // be checked elsewhere.
- }
- return x
-}
-
-// funcType type-checks a function or method type.
-func (check *Checker) funcType(sig *Signature, recvPar *syntax.Field, tparams []*syntax.Field, ftyp *syntax.FuncType) {
- check.openScope(ftyp, "function")
- check.scope.isFunc = true
- check.recordScope(ftyp, check.scope)
- sig.scope = check.scope
- defer check.closeScope()
-
- var recvTyp syntax.Expr // rewritten receiver type; valid if != nil
- if recvPar != nil {
- // collect generic receiver type parameters, if any
- // - a receiver type parameter is like any other type parameter, except that it is declared implicitly
- // - the receiver specification acts as local declaration for its type parameters, which may be blank
- _, rname, rparams := check.unpackRecv(recvPar.Type, true)
- if len(rparams) > 0 {
- // Blank identifiers don't get declared and regular type-checking of the instantiated
- // parameterized receiver type expression fails in Checker.collectParams of receiver.
- // Identify blank type parameters and substitute each with a unique new identifier named
- // "n_" (where n is the parameter index) and which cannot conflict with any user-defined
- // name.
- var smap map[*syntax.Name]*syntax.Name // substitution map from "_" to "!n" identifiers
- for i, p := range rparams {
- if p.Value == "_" {
- new := *p
- new.Value = fmt.Sprintf("%d_", i)
- rparams[i] = &new // use n_ identifier instead of _ so it can be looked up
- if smap == nil {
- smap = make(map[*syntax.Name]*syntax.Name)
- }
- smap[p] = &new
- }
- }
- if smap != nil {
- // blank identifiers were found => use rewritten receiver type
- recvTyp = isubst(recvPar.Type, smap)
- }
- // TODO(gri) rework declareTypeParams
- sig.rparams = nil
- for _, rparam := range rparams {
- sig.rparams = check.declareTypeParam(sig.rparams, rparam)
- }
- // determine receiver type to get its type parameters
- // and the respective type parameter bounds
- var recvTParams []*TypeName
- if rname != nil {
- // recv should be a Named type (otherwise an error is reported elsewhere)
- // Also: Don't report an error via genericType since it will be reported
- // again when we type-check the signature.
- // TODO(gri) maybe the receiver should be marked as invalid instead?
- if recv := asNamed(check.genericType(rname, false)); recv != nil {
- recvTParams = recv.tparams
- }
- }
- // provide type parameter bounds
- // - only do this if we have the right number (otherwise an error is reported elsewhere)
- if len(sig.rparams) == len(recvTParams) {
- // We have a list of *TypeNames but we need a list of Types.
- list := make([]Type, len(sig.rparams))
- for i, t := range sig.rparams {
- list[i] = t.typ
- }
- smap := makeSubstMap(recvTParams, list)
- for i, tname := range sig.rparams {
- bound := recvTParams[i].typ.(*TypeParam).bound
- // bound is (possibly) parameterized in the context of the
- // receiver type declaration. Substitute parameters for the
- // current context.
- // TODO(gri) should we assume now that bounds always exist?
- // (no bound == empty interface)
- if bound != nil {
- bound = check.subst(tname.pos, bound, smap)
- tname.typ.(*TypeParam).bound = bound
- }
- }
- }
- }
- }
-
- if tparams != nil {
- sig.tparams = check.collectTypeParams(tparams)
- // Always type-check method type parameters but complain if they are not enabled.
- // (A separate check is needed when type-checking interface method signatures because
- // they don't have a receiver specification.)
- if recvPar != nil && !acceptMethodTypeParams {
- check.error(ftyp, "methods cannot have type parameters")
- }
- }
-
- // Value (non-type) parameters' scope starts in the function body. Use a temporary scope for their
- // declarations and then squash that scope into the parent scope (and report any redeclarations at
- // that time).
- scope := NewScope(check.scope, nopos, nopos, "function body (temp. scope)")
- var recvList []*Var // TODO(gri) remove the need for making a list here
- if recvPar != nil {
- recvList, _ = check.collectParams(scope, []*syntax.Field{recvPar}, recvTyp, false) // use rewritten receiver type, if any
- }
- params, variadic := check.collectParams(scope, ftyp.ParamList, nil, true)
- results, _ := check.collectParams(scope, ftyp.ResultList, nil, false)
- scope.Squash(func(obj, alt Object) {
- var err error_
- err.errorf(obj, "%s redeclared in this block", obj.Name())
- err.recordAltDecl(alt)
- check.report(&err)
- })
-
- if recvPar != nil {
- // recv parameter list present (may be empty)
- // spec: "The receiver is specified via an extra parameter section preceding the
- // method name. That parameter section must declare a single parameter, the receiver."
- var recv *Var
- switch len(recvList) {
- case 0:
- // error reported by resolver
- recv = NewParam(nopos, nil, "", Typ[Invalid]) // ignore recv below
- default:
- // more than one receiver
- check.error(recvList[len(recvList)-1].Pos(), "method must have exactly one receiver")
- fallthrough // continue with first receiver
- case 1:
- recv = recvList[0]
- }
-
- // TODO(gri) We should delay rtyp expansion to when we actually need the
- // receiver; thus all checks here should be delayed to later.
- rtyp, _ := deref(recv.typ)
- rtyp = expand(rtyp)
-
- // spec: "The receiver type must be of the form T or *T where T is a type name."
- // (ignore invalid types - error was reported before)
- if t := rtyp; t != Typ[Invalid] {
- var err string
- if T := asNamed(t); T != nil {
- // spec: "The type denoted by T is called the receiver base type; it must not
- // be a pointer or interface type and it must be declared in the same package
- // as the method."
- if T.obj.pkg != check.pkg {
- err = "type not defined in this package"
- if check.conf.CompilerErrorMessages {
- check.errorf(recv.pos, "cannot define new methods on non-local type %s", recv.typ)
- err = ""
- }
- } else {
- switch u := optype(T).(type) {
- case *Basic:
- // unsafe.Pointer is treated like a regular pointer
- if u.kind == UnsafePointer {
- err = "unsafe.Pointer"
- }
- case *Pointer, *Interface:
- err = "pointer or interface type"
- }
- }
- } else if T := asBasic(t); T != nil {
- err = "basic or unnamed type"
- if check.conf.CompilerErrorMessages {
- check.errorf(recv.pos, "cannot define new methods on non-local type %s", recv.typ)
- err = ""
- }
- } else {
- check.errorf(recv.pos, "invalid receiver type %s", recv.typ)
- }
- if err != "" {
- check.errorf(recv.pos, "invalid receiver type %s (%s)", recv.typ, err)
- // ok to continue
- }
- }
- sig.recv = recv
- }
-
- sig.params = NewTuple(params...)
- sig.results = NewTuple(results...)
- sig.variadic = variadic
-}
-
// goTypeName returns the Go type name for typ and
// removes any occurrences of "types2." from that name.
func goTypeName(typ Type) string {
@@ -451,7 +225,7 @@ func (check *Checker) typInternal(e0 syntax.Expr, def *Named) (T Type) {
// Test case: type T[P any] *T[P]
// TODO(gri) investigate if that's a bug or to be expected
// (see also analogous comment in Checker.instantiate).
- under = T.Underlying()
+ under = safeUnderlying(T)
}
if T == under {
check.trace(e0.Pos(), "=> %s // %s", T, goTypeName(T))
@@ -647,44 +421,36 @@ func (check *Checker) typOrNil(e syntax.Expr) Type {
return Typ[Invalid]
}
-func (check *Checker) instantiatedType(x syntax.Expr, targs []syntax.Expr, def *Named) Type {
- b := check.genericType(x, true) // TODO(gri) what about cycles?
- if b == Typ[Invalid] {
- return b // error already reported
+func (check *Checker) instantiatedType(x syntax.Expr, targsx []syntax.Expr, def *Named) Type {
+ gtyp := check.genericType(x, true)
+ if gtyp == Typ[Invalid] {
+ return gtyp // error already reported
}
- base := asNamed(b)
+ base, _ := gtyp.(*Named)
if base == nil {
- unreachable() // should have been caught by genericType
+ panic(fmt.Sprintf("%v: cannot instantiate %v", x.Pos(), gtyp))
}
- // create a new type instance rather than instantiate the type
- // TODO(gri) should do argument number check here rather than
- // when instantiating the type?
- typ := new(instance)
- def.setUnderlying(typ)
-
- typ.check = check
- typ.pos = x.Pos()
- typ.base = base
-
- // evaluate arguments (always)
- typ.targs = check.typeList(targs)
- if typ.targs == nil {
+ // evaluate arguments
+ targs := check.typeList(targsx)
+ if targs == nil {
def.setUnderlying(Typ[Invalid]) // avoid later errors due to lazy instantiation
return Typ[Invalid]
}
- // determine argument positions (for error reporting)
- typ.poslist = make([]syntax.Pos, len(targs))
- for i, arg := range targs {
- typ.poslist[i] = syntax.StartPos(arg)
+ // determine argument positions
+ posList := make([]syntax.Pos, len(targs))
+ for i, arg := range targsx {
+ posList[i] = syntax.StartPos(arg)
}
+ typ := check.instantiate(x.Pos(), base, targs, posList)
+ def.setUnderlying(typ)
+
// make sure we check instantiation works at least once
// and that the resulting type is valid
check.later(func() {
- t := typ.expand()
- check.validType(t, nil)
+ check.validType(typ, nil)
})
return typ
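
In source terms, instantiatedType handles type expressions like the one below: the generic base is resolved via genericType, the bracketed arguments are evaluated with typeList, and check.instantiate now builds the instance eagerly instead of leaving a lazy placeholder (illustrative example; the names are hypothetical):

    package p

    // Pair is a generic (*Named) type with two type parameters.
    type Pair[A, B any] struct {
        First  A
        Second B
    }

    // The type expression Pair[int, string] is what instantiatedType
    // sees: Pair is the base, int and string are the type arguments,
    // and their positions are recorded for error reporting.
    var P Pair[int, string]
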
@@ -732,537 +498,3 @@ func (check *Checker) typeList(list []syntax.Expr) []Type {
}
return res
}
-
-// collectParams declares the parameters of list in scope and returns the corresponding
-// variable list. If type0 != nil, it is used instead of the first type in list.
-func (check *Checker) collectParams(scope *Scope, list []*syntax.Field, type0 syntax.Expr, variadicOk bool) (params []*Var, variadic bool) {
- if list == nil {
- return
- }
-
- var named, anonymous bool
-
- var typ Type
- var prev syntax.Expr
- for i, field := range list {
- ftype := field.Type
- // type-check type of grouped fields only once
- if ftype != prev {
- prev = ftype
- if i == 0 && type0 != nil {
- ftype = type0
- }
- if t, _ := ftype.(*syntax.DotsType); t != nil {
- ftype = t.Elem
- if variadicOk && i == len(list)-1 {
- variadic = true
- } else {
- check.softErrorf(t, "can only use ... with final parameter in list")
- // ignore ... and continue
- }
- }
- typ = check.varType(ftype)
- }
- // The parser ensures that f.Tag is nil and we don't
- // care if a constructed AST contains a non-nil tag.
- if field.Name != nil {
- // named parameter
- name := field.Name.Value
- if name == "" {
- check.error(field.Name, invalidAST+"anonymous parameter")
- // ok to continue
- }
- par := NewParam(field.Name.Pos(), check.pkg, name, typ)
- check.declare(scope, field.Name, par, scope.pos)
- params = append(params, par)
- named = true
- } else {
- // anonymous parameter
- par := NewParam(ftype.Pos(), check.pkg, "", typ)
- check.recordImplicit(field, par)
- params = append(params, par)
- anonymous = true
- }
- }
-
- if named && anonymous {
- check.error(list[0], invalidAST+"list contains both named and anonymous parameters")
- // ok to continue
- }
-
- // For a variadic function, change the last parameter's type from T to []T.
- // Since we type-checked T rather than ...T, we also need to retro-actively
- // record the type for ...T.
- if variadic {
- last := params[len(params)-1]
- last.typ = &Slice{elem: last.typ}
- check.recordTypeAndValue(list[len(list)-1].Type, typexpr, last.typ, nil)
- }
-
- return
-}
-
-func (check *Checker) declareInSet(oset *objset, pos syntax.Pos, obj Object) bool {
- if alt := oset.insert(obj); alt != nil {
- var err error_
- err.errorf(pos, "%s redeclared", obj.Name())
- err.recordAltDecl(alt)
- check.report(&err)
- return false
- }
- return true
-}
-
-func (check *Checker) interfaceType(ityp *Interface, iface *syntax.InterfaceType, def *Named) {
- var tname *syntax.Name // most recent "type" name
- var types []syntax.Expr
- for _, f := range iface.MethodList {
- if f.Name != nil {
- // We have a method with name f.Name, or a type
- // of a type list (f.Name.Value == "type").
- name := f.Name.Value
- if name == "_" {
- if check.conf.CompilerErrorMessages {
- check.error(f.Name, "methods must have a unique non-blank name")
- } else {
- check.error(f.Name, "invalid method name _")
- }
- continue // ignore
- }
-
- if name == "type" {
- // Always collect all type list entries, even from
- // different type lists, under the assumption that
- // the author intended to include all types.
- types = append(types, f.Type)
- if tname != nil && tname != f.Name {
- check.error(f.Name, "cannot have multiple type lists in an interface")
- }
- tname = f.Name
- continue
- }
-
- typ := check.typ(f.Type)
- sig, _ := typ.(*Signature)
- if sig == nil {
- if typ != Typ[Invalid] {
- check.errorf(f.Type, invalidAST+"%s is not a method signature", typ)
- }
- continue // ignore
- }
-
- // Always type-check method type parameters but complain if they are not enabled.
- // (This extra check is needed here because interface method signatures don't have
- // a receiver specification.)
- if sig.tparams != nil && !acceptMethodTypeParams {
- check.error(f.Type, "methods cannot have type parameters")
- }
-
- // use named receiver type if available (for better error messages)
- var recvTyp Type = ityp
- if def != nil {
- recvTyp = def
- }
- sig.recv = NewVar(f.Name.Pos(), check.pkg, "", recvTyp)
-
- m := NewFunc(f.Name.Pos(), check.pkg, name, sig)
- check.recordDef(f.Name, m)
- ityp.methods = append(ityp.methods, m)
- } else {
- // We have an embedded type. completeInterface will
- // eventually verify that we have an interface.
- ityp.embeddeds = append(ityp.embeddeds, check.typ(f.Type))
- check.posMap[ityp] = append(check.posMap[ityp], f.Type.Pos())
- }
- }
-
- // type constraints
- ityp.types = NewSum(check.collectTypeConstraints(iface.Pos(), types))
-
- if len(ityp.methods) == 0 && ityp.types == nil && len(ityp.embeddeds) == 0 {
- // empty interface
- ityp.allMethods = markComplete
- return
- }
-
- // sort for API stability
- sortMethods(ityp.methods)
- sortTypes(ityp.embeddeds)
-
- check.later(func() { check.completeInterface(iface.Pos(), ityp) })
-}
-
-func (check *Checker) completeInterface(pos syntax.Pos, ityp *Interface) {
- if ityp.allMethods != nil {
- return
- }
-
- // completeInterface may be called via the LookupFieldOrMethod,
- // MissingMethod, Identical, or IdenticalIgnoreTags external API
- // in which case check will be nil. In this case, type-checking
- // must be finished and all interfaces should have been completed.
- if check == nil {
- panic("internal error: incomplete interface")
- }
-
- if check.conf.Trace {
- // Types don't generally have position information.
- // If we don't have a valid pos provided, try to use
- // one close enough.
- if !pos.IsKnown() && len(ityp.methods) > 0 {
- pos = ityp.methods[0].pos
- }
-
- check.trace(pos, "complete %s", ityp)
- check.indent++
- defer func() {
- check.indent--
- check.trace(pos, "=> %s (methods = %v, types = %v)", ityp, ityp.allMethods, ityp.allTypes)
- }()
- }
-
- // An infinitely expanding interface (due to a cycle) is detected
- // elsewhere (Checker.validType), so here we simply assume we only
- // have valid interfaces. Mark the interface as complete to avoid
- // infinite recursion if the validType check occurs later for some
- // reason.
- ityp.allMethods = markComplete
-
- // Methods of embedded interfaces are collected unchanged; i.e., the identity
- // of a method I.m's Func Object of an interface I is the same as that of
- // the method m in an interface that embeds interface I. On the other hand,
- // if a method is embedded via multiple overlapping embedded interfaces, we
- // don't provide a guarantee which "original m" got chosen for the embedding
- // interface. See also issue #34421.
- //
- // If we don't care to provide this identity guarantee anymore, instead of
- // reusing the original method in embeddings, we can clone the method's Func
- // Object and give it the position of a corresponding embedded interface. Then
- // we can get rid of the mpos map below and simply use the cloned method's
- // position.
-
- var seen objset
- var methods []*Func
- mpos := make(map[*Func]syntax.Pos) // method specification or method embedding position, for good error messages
- addMethod := func(pos syntax.Pos, m *Func, explicit bool) {
- switch other := seen.insert(m); {
- case other == nil:
- methods = append(methods, m)
- mpos[m] = pos
- case explicit:
- var err error_
- err.errorf(pos, "duplicate method %s", m.name)
- err.errorf(mpos[other.(*Func)], "other declaration of %s", m.name)
- check.report(&err)
- default:
- // We have a duplicate method name in an embedded (not explicitly declared) method.
- // Check method signatures after all types are computed (issue #33656).
- // If we're pre-go1.14 (overlapping embeddings are not permitted), report that
- // error here as well (even though we could do it eagerly) because it's the same
- // error message.
- check.later(func() {
- if !check.allowVersion(m.pkg, 1, 14) || !check.identical(m.typ, other.Type()) {
- var err error_
- err.errorf(pos, "duplicate method %s", m.name)
- err.errorf(mpos[other.(*Func)], "other declaration of %s", m.name)
- check.report(&err)
- }
- })
- }
- }
-
- for _, m := range ityp.methods {
- addMethod(m.pos, m, true)
- }
-
- // collect types
- allTypes := ityp.types
-
- posList := check.posMap[ityp]
- for i, typ := range ityp.embeddeds {
- pos := posList[i] // embedding position
- utyp := under(typ)
- etyp := asInterface(utyp)
- if etyp == nil {
- if utyp != Typ[Invalid] {
- var format string
- if _, ok := utyp.(*TypeParam); ok {
- format = "%s is a type parameter, not an interface"
- } else {
- format = "%s is not an interface"
- }
- check.errorf(pos, format, typ)
- }
- continue
- }
- check.completeInterface(pos, etyp)
- for _, m := range etyp.allMethods {
- addMethod(pos, m, false) // use embedding position pos rather than m.pos
- }
- allTypes = intersect(allTypes, etyp.allTypes)
- }
-
- if methods != nil {
- sortMethods(methods)
- ityp.allMethods = methods
- }
- ityp.allTypes = allTypes
-}
-
-// intersect computes the intersection of the types x and y.
-// Note: A incomming nil type stands for the top type. A top
-// type result is returned as nil.
-func intersect(x, y Type) (r Type) {
- defer func() {
- if r == theTop {
- r = nil
- }
- }()
-
- switch {
- case x == theBottom || y == theBottom:
- return theBottom
- case x == nil || x == theTop:
- return y
- case y == nil || x == theTop:
- return x
- }
-
- xtypes := unpack(x)
- ytypes := unpack(y)
- // Compute the list rtypes which includes only
- // types that are in both xtypes and ytypes.
- // Quadratic algorithm, but good enough for now.
- // TODO(gri) fix this
- var rtypes []Type
- for _, x := range xtypes {
- if includes(ytypes, x) {
- rtypes = append(rtypes, x)
- }
- }
-
- if rtypes == nil {
- return theBottom
- }
- return NewSum(rtypes)
-}
-
-func sortTypes(list []Type) {
- sort.Stable(byUniqueTypeName(list))
-}
-
-// byUniqueTypeName named type lists can be sorted by their unique type names.
-type byUniqueTypeName []Type
-
-func (a byUniqueTypeName) Len() int { return len(a) }
-func (a byUniqueTypeName) Less(i, j int) bool { return sortName(a[i]) < sortName(a[j]) }
-func (a byUniqueTypeName) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
-
-func sortName(t Type) string {
- if named := asNamed(t); named != nil {
- return named.obj.Id()
- }
- return ""
-}
-
-func sortMethods(list []*Func) {
- sort.Sort(byUniqueMethodName(list))
-}
-
-func assertSortedMethods(list []*Func) {
- if !debug {
- panic("internal error: assertSortedMethods called outside debug mode")
- }
- if !sort.IsSorted(byUniqueMethodName(list)) {
- panic("internal error: methods not sorted")
- }
-}
-
-// byUniqueMethodName method lists can be sorted by their unique method names.
-type byUniqueMethodName []*Func
-
-func (a byUniqueMethodName) Len() int { return len(a) }
-func (a byUniqueMethodName) Less(i, j int) bool { return a[i].less(a[j]) }
-func (a byUniqueMethodName) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
-
-func (check *Checker) tag(t *syntax.BasicLit) string {
- // If t.Bad, an error was reported during parsing.
- if t != nil && !t.Bad {
- if t.Kind == syntax.StringLit {
- if val, err := strconv.Unquote(t.Value); err == nil {
- return val
- }
- }
- check.errorf(t, invalidAST+"incorrect tag syntax: %q", t.Value)
- }
- return ""
-}
-
-func (check *Checker) structType(styp *Struct, e *syntax.StructType) {
- if e.FieldList == nil {
- return
- }
-
- // struct fields and tags
- var fields []*Var
- var tags []string
-
- // for double-declaration checks
- var fset objset
-
- // current field typ and tag
- var typ Type
- var tag string
- add := func(ident *syntax.Name, embedded bool, pos syntax.Pos) {
- if tag != "" && tags == nil {
- tags = make([]string, len(fields))
- }
- if tags != nil {
- tags = append(tags, tag)
- }
-
- name := ident.Value
- fld := NewField(pos, check.pkg, name, typ, embedded)
- // spec: "Within a struct, non-blank field names must be unique."
- if name == "_" || check.declareInSet(&fset, pos, fld) {
- fields = append(fields, fld)
- check.recordDef(ident, fld)
- }
- }
-
- // addInvalid adds an embedded field of invalid type to the struct for
- // fields with errors; this keeps the number of struct fields in sync
- // with the source as long as the fields are _ or have different names
- // (issue #25627).
- addInvalid := func(ident *syntax.Name, pos syntax.Pos) {
- typ = Typ[Invalid]
- tag = ""
- add(ident, true, pos)
- }
-
- var prev syntax.Expr
- for i, f := range e.FieldList {
- // Fields declared syntactically with the same type (e.g.: a, b, c T)
- // share the same type expression. Only check type if it's a new type.
- if i == 0 || f.Type != prev {
- typ = check.varType(f.Type)
- prev = f.Type
- }
- tag = ""
- if i < len(e.TagList) {
- tag = check.tag(e.TagList[i])
- }
- if f.Name != nil {
- // named field
- add(f.Name, false, f.Name.Pos())
- } else {
- // embedded field
- // spec: "An embedded type must be specified as a type name T or as a
- // pointer to a non-interface type name *T, and T itself may not be a
- // pointer type."
- pos := syntax.StartPos(f.Type)
- name := embeddedFieldIdent(f.Type)
- if name == nil {
- check.errorf(pos, "invalid embedded field type %s", f.Type)
- name = &syntax.Name{Value: "_"} // TODO(gri) need to set position to pos
- addInvalid(name, pos)
- continue
- }
- add(name, true, pos)
-
- // Because we have a name, typ must be of the form T or *T, where T is the name
- // of a (named or alias) type, and t (= deref(typ)) must be the type of T.
- // We must delay this check to the end because we don't want to instantiate
- // (via under(t)) a possibly incomplete type.
- embeddedTyp := typ // for closure below
- embeddedPos := pos
- check.later(func() {
- t, isPtr := deref(embeddedTyp)
- switch t := optype(t).(type) {
- case *Basic:
- if t == Typ[Invalid] {
- // error was reported before
- return
- }
- // unsafe.Pointer is treated like a regular pointer
- if t.kind == UnsafePointer {
- check.error(embeddedPos, "embedded field type cannot be unsafe.Pointer")
- }
- case *Pointer:
- check.error(embeddedPos, "embedded field type cannot be a pointer")
- case *Interface:
- if isPtr {
- check.error(embeddedPos, "embedded field type cannot be a pointer to an interface")
- }
- }
- })
- }
- }
-
- styp.fields = fields
- styp.tags = tags
-}
-
-func embeddedFieldIdent(e syntax.Expr) *syntax.Name {
- switch e := e.(type) {
- case *syntax.Name:
- return e
- case *syntax.Operation:
- if base := ptrBase(e); base != nil {
- // *T is valid, but **T is not
- if op, _ := base.(*syntax.Operation); op == nil || ptrBase(op) == nil {
- return embeddedFieldIdent(e.X)
- }
- }
- case *syntax.SelectorExpr:
- return e.Sel
- case *syntax.IndexExpr:
- return embeddedFieldIdent(e.X)
- }
- return nil // invalid embedded field
-}
-
-func (check *Checker) collectTypeConstraints(pos syntax.Pos, types []syntax.Expr) []Type {
- list := make([]Type, 0, len(types)) // assume all types are correct
- for _, texpr := range types {
- if texpr == nil {
- check.error(pos, invalidAST+"missing type constraint")
- continue
- }
- list = append(list, check.varType(texpr))
- }
-
- // Ensure that each type is only present once in the type list. Types may be
- // interfaces, which may not be complete yet. It's ok to do this check at the
- // end because it's not a requirement for correctness of the code.
- // Note: This is a quadratic algorithm, but type lists tend to be short.
- check.later(func() {
- for i, t := range list {
- if t := asInterface(t); t != nil {
- check.completeInterface(types[i].Pos(), t)
- }
- if includes(list[:i], t) {
- check.softErrorf(types[i], "duplicate type %s in type list", t)
- }
- }
- })
-
- return list
-}
-
-// includes reports whether typ is in list
-func includes(list []Type, typ Type) bool {
- for _, e := range list {
- if Identical(typ, e) {
- return true
- }
- }
- return false
-}
-
-func ptrBase(x *syntax.Operation) syntax.Expr {
- if x.Op == syntax.Mul && x.Y == nil {
- return x.X
- }
- return nil
-}
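
The ident changes earlier in this file make any and comparable resolve from the universe scope, gated on go1.18, with any still restricted to constraint position at this point (the restriction is tied to issue #33232, as noted in the code). Illustrative source the checker accepts and rejects under this change (quoted diagnostics come from the errorf calls above):

    package p

    // Accepted: comparable and any used as constraints.
    func Index[E comparable](s []E, v E) int {
        for i, x := range s {
            if x == v {
                return i
            }
        }
        return -1
    }

    func First[T any](s []T) T { return s[0] }

    // Rejected at this point: "cannot use any outside constraint position".
    // var x any

    // Rejected when the language version is below go1.18:
    // "undeclared name: any (requires version go1.18 or later)".
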
diff --git a/src/cmd/compile/internal/types2/unify.go b/src/cmd/compile/internal/types2/unify.go
index e1832bbb2a..d4fbebc11b 100644
--- a/src/cmd/compile/internal/types2/unify.go
+++ b/src/cmd/compile/internal/types2/unify.go
@@ -6,7 +6,10 @@
package types2
-import "bytes"
+import (
+ "bytes"
+ "fmt"
+)
// The unifier maintains two separate sets of type parameters x and y
// which are used to resolve type parameters in the x and y arguments
@@ -34,7 +37,6 @@ import "bytes"
// and the respective types inferred for each type parameter.
// A unifier is created by calling newUnifier.
type unifier struct {
- check *Checker
exact bool
x, y tparamsList // x and y must initialized via tparamsList.init
types []Type // inferred types, shared by x and y
@@ -45,8 +47,8 @@ type unifier struct {
// exactly. If exact is not set, a named type's underlying type
// is considered if unification would fail otherwise, and the
// direction of channels is ignored.
-func newUnifier(check *Checker, exact bool) *unifier {
- u := &unifier{check: check, exact: exact}
+func newUnifier(exact bool) *unifier {
+ u := &unifier{exact: exact}
u.x.unifier = u
u.y.unifier = u
return u
@@ -60,7 +62,7 @@ func (u *unifier) unify(x, y Type) bool {
// A tparamsList describes a list of type parameters and the types inferred for them.
type tparamsList struct {
unifier *unifier
- tparams []*TypeName
+ tparams []*TypeParam
// For each tparams element, there is a corresponding type slot index in indices.
// index < 0: unifier.types[-index-1] == nil
// index == 0: no type slot allocated yet
@@ -75,11 +77,11 @@ type tparamsList struct {
func (d *tparamsList) String() string {
var buf bytes.Buffer
buf.WriteByte('[')
- for i, tname := range d.tparams {
+ for i, tpar := range d.tparams {
if i > 0 {
buf.WriteString(", ")
}
- writeType(&buf, tname.typ, nil, nil)
+ writeType(&buf, tpar, nil, nil)
buf.WriteString(": ")
writeType(&buf, d.at(i), nil, nil)
}
@@ -90,13 +92,13 @@ func (d *tparamsList) String() string {
// init initializes d with the given type parameters.
// The type parameters must be in the order in which they appear in their declaration
// (this ensures that the tparams indices match the respective type parameter index).
-func (d *tparamsList) init(tparams []*TypeName) {
+func (d *tparamsList) init(tparams []*TypeParam) {
if len(tparams) == 0 {
return
}
if debug {
for i, tpar := range tparams {
- assert(i == tpar.typ.(*TypeParam).index)
+ assert(i == tpar.index)
}
}
d.tparams = tparams
@@ -148,10 +150,17 @@ func (u *unifier) join(i, j int) bool {
// If typ is a type parameter of d, index returns the type parameter index.
// Otherwise, the result is < 0.
func (d *tparamsList) index(typ Type) int {
- if t, ok := typ.(*TypeParam); ok {
- if i := t.index; i < len(d.tparams) && d.tparams[i].typ == t {
- return i
- }
+ if tpar, ok := typ.(*TypeParam); ok {
+ return tparamIndex(d.tparams, tpar)
+ }
+ return -1
+}
+
+// If tpar is a type parameter in list, tparamIndex returns the type parameter index.
+// Otherwise, the result is < 0. tpar must not be nil.
+func tparamIndex(list []*TypeParam, tpar *TypeParam) int {
+ if i := tpar.index; i < len(list) && list[i] == tpar {
+ return i
}
return -1
}
@@ -220,10 +229,6 @@ func (u *unifier) nifyEq(x, y Type, p *ifacePair) bool {
// code the corresponding changes should be made here.
// Must not be called directly from outside the unifier.
func (u *unifier) nify(x, y Type, p *ifacePair) bool {
- // types must be expanded for comparison
- x = expand(x)
- y = expand(y)
-
if !u.exact {
// If exact unification is known to fail because we attempt to
// match a type name against an unnamed type literal, consider
@@ -352,25 +357,18 @@ func (u *unifier) nify(x, y Type, p *ifacePair) bool {
u.nify(x.results, y.results, p)
}
- case *Sum:
- // This should not happen with the current internal use of sum types.
- panic("type inference across sum types not implemented")
-
case *Interface:
// Two interface types are identical if they have the same set of methods with
// the same names and identical function types. Lower-case method names from
// different packages are always different. The order of the methods is irrelevant.
if y, ok := y.(*Interface); ok {
- // If identical0 is called (indirectly) via an external API entry point
- // (such as Identical, IdenticalIgnoreTags, etc.), check is nil. But in
- // that case, interfaces are expected to be complete and lazy completion
- // here is not needed.
- if u.check != nil {
- u.check.completeInterface(nopos, x)
- u.check.completeInterface(nopos, y)
+ xset := x.typeSet()
+ yset := y.typeSet()
+ if !xset.terms.equal(yset.terms) {
+ return false
}
- a := x.allMethods
- b := y.allMethods
+ a := xset.methods
+ b := yset.methods
if len(a) == len(b) {
// Interface types are the only types where cycles can occur
// that are not "terminated" via named types; and such cycles
@@ -428,19 +426,20 @@ func (u *unifier) nify(x, y Type, p *ifacePair) bool {
}
case *Named:
- // Two named types are identical if their type names originate
- // in the same type declaration.
- // if y, ok := y.(*Named); ok {
- // return x.obj == y.obj
- // }
if y, ok := y.(*Named); ok {
+ x.expand(nil)
+ y.expand(nil)
+
+ xargs := x.targs.list()
+ yargs := y.targs.list()
+
// TODO(gri) This is not always correct: two types may have the same names
// in the same package if one of them is nested in a function.
// Extremely unlikely but we need an always correct solution.
if x.obj.pkg == y.obj.pkg && x.obj.name == y.obj.name {
- assert(len(x.targs) == len(y.targs))
- for i, x := range x.targs {
- if !u.nify(x, y.targs[i], p) {
+ assert(len(xargs) == len(yargs))
+ for i, x := range xargs {
+ if !u.nify(x, yargs[i], p) {
return false
}
}
@@ -454,15 +453,11 @@ func (u *unifier) nify(x, y Type, p *ifacePair) bool {
// are identical if they originate in the same declaration.
return x == y
- // case *instance:
- // unreachable since types are expanded
-
case nil:
// avoid a crash in case of nil type
default:
- u.check.dump("### u.nify(%s, %s), u.x.tparams = %s", x, y, u.x.tparams)
- unreachable()
+ panic(fmt.Sprintf("### u.nify(%s, %s), u.x.tparams = %s", x, y, u.x.tparams))
}
return false
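
The new tparamIndex helper relies on each type parameter recording its declaration index, which turns membership testing into a constant-time check rather than a list search. A standalone sketch of the same idea (hypothetical tp type, not types2.TypeParam):

    package main

    import "fmt"

    // tp stands in for types2.TypeParam: index is the parameter's
    // position in its declaring type parameter list.
    type tp struct {
        name  string
        index int
    }

    // tparamIndex mirrors the helper above: a parameter belongs to list
    // exactly when list[tp.index] is that same parameter.
    func tparamIndex(list []*tp, t *tp) int {
        if i := t.index; i < len(list) && list[i] == t {
            return i
        }
        return -1
    }

    func main() {
        p := &tp{"P", 0}
        q := &tp{"Q", 1}
        other := &tp{"R", 0} // same index, but a different parameter
        list := []*tp{p, q}
        fmt.Println(tparamIndex(list, q))     // 1
        fmt.Println(tparamIndex(list, other)) // -1
    }
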
diff --git a/src/cmd/compile/internal/types2/union.go b/src/cmd/compile/internal/types2/union.go
new file mode 100644
index 0000000000..933e5a2951
--- /dev/null
+++ b/src/cmd/compile/internal/types2/union.go
@@ -0,0 +1,150 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package types2
+
+import "cmd/compile/internal/syntax"
+
+// ----------------------------------------------------------------------------
+// API
+
+// A Union represents a union of terms embedded in an interface.
+type Union struct {
+ terms []*Term // list of syntactical terms (not a canonicalized termlist)
+ tset *_TypeSet // type set described by this union, computed lazily
+}
+
+// NewUnion returns a new Union type with the given terms.
+// It is an error to create an empty union; they are syntactically not possible.
+func NewUnion(terms []*Term) *Union {
+ if len(terms) == 0 {
+ panic("empty union")
+ }
+ return &Union{terms, nil}
+}
+
+func (u *Union) Len() int { return len(u.terms) }
+func (u *Union) Term(i int) *Term { return u.terms[i] }
+
+func (u *Union) Underlying() Type { return u }
+func (u *Union) String() string { return TypeString(u, nil) }
+
+// A Term represents a term in a Union.
+type Term term
+
+// NewTerm returns a new union term.
+func NewTerm(tilde bool, typ Type) *Term { return &Term{tilde, typ} }
+
+func (t *Term) Tilde() bool { return t.tilde }
+func (t *Term) Type() Type { return t.typ }
+func (t *Term) String() string { return (*term)(t).String() }
+
+// ----------------------------------------------------------------------------
+// Implementation
+
+// Avoid excessive type-checking times due to quadratic termlist operations.
+const maxTermCount = 100
+
+// parseUnion parses the given list of type expressions tlist as a union of
+// those expressions. The result is a Union type, or Typ[Invalid] for some
+// errors.
+func parseUnion(check *Checker, tlist []syntax.Expr) Type {
+ var terms []*Term
+ for _, x := range tlist {
+ tilde, typ := parseTilde(check, x)
+ if len(tlist) == 1 && !tilde {
+ return typ // single type (optimization)
+ }
+ if len(terms) >= maxTermCount {
+ check.errorf(x, "cannot handle more than %d union terms (implementation limitation)", maxTermCount)
+ return Typ[Invalid]
+ }
+ terms = append(terms, NewTerm(tilde, typ))
+ }
+
+ // Check validity of terms.
+ // Do this check later because it requires types to be set up.
+ // Note: This is a quadratic algorithm, but unions tend to be short.
+ check.later(func() {
+ for i, t := range terms {
+ if t.typ == Typ[Invalid] {
+ continue
+ }
+
+ x := tlist[i]
+ pos := syntax.StartPos(x)
+ // We may not know the position of x if it was a typechecker-
+ // introduced ~T term for a type list entry T. Use the position
+ // of T instead.
+ // TODO(gri) remove this test once we don't support type lists anymore
+ if !pos.IsKnown() {
+ if op, _ := x.(*syntax.Operation); op != nil {
+ pos = syntax.StartPos(op.X)
+ }
+ }
+
+ u := under(t.typ)
+ f, _ := u.(*Interface)
+ if t.tilde {
+ if f != nil {
+ check.errorf(x, "invalid use of ~ (%s is an interface)", t.typ)
+ continue // don't report another error for t
+ }
+
+ if !Identical(u, t.typ) {
+ check.errorf(x, "invalid use of ~ (underlying type of %s is %s)", t.typ, u)
+ continue // don't report another error for t
+ }
+ }
+
+ // Stand-alone embedded interfaces are ok and are handled by the single-type case
+ // in the beginning. Embedded interfaces with tilde are excluded above. If we reach
+ // here, we must have at least two terms in the union.
+ if f != nil && !f.typeSet().IsTypeSet() {
+ check.errorf(pos, "cannot use %s in union (interface contains methods)", t)
+ continue // don't report another error for t
+ }
+
+ // Report overlapping (non-disjoint) terms such as
+ // a|a, a|~a, ~a|~a, and ~a|A (where under(A) == a).
+ if j := overlappingTerm(terms[:i], t); j >= 0 {
+ check.softErrorf(pos, "overlapping terms %s and %s", t, terms[j])
+ }
+ }
+ })
+
+ return &Union{terms, nil}
+}
+
+func parseTilde(check *Checker, x syntax.Expr) (tilde bool, typ Type) {
+ if op, _ := x.(*syntax.Operation); op != nil && op.Op == syntax.Tilde {
+ x = op.X
+ tilde = true
+ }
+ typ = check.anyType(x)
+ // embedding stand-alone type parameters is not permitted (issue #47127).
+ if _, ok := under(typ).(*TypeParam); ok {
+ check.error(x, "cannot embed a type parameter")
+ typ = Typ[Invalid]
+ }
+ return
+}
+
+// overlappingTerm reports the index of the term x in terms which is
+// overlapping (not disjoint) from y. The result is < 0 if there is no
+// such term.
+func overlappingTerm(terms []*Term, y *Term) int {
+ for i, x := range terms {
+ // disjoint requires non-nil, non-top arguments
+ if debug {
+ if x == nil || x.typ == nil || y == nil || y.typ == nil {
+ panic("empty or top union term")
+ }
+ }
+ if !(*term)(x).disjoint((*term)(y)) {
+ return i
+ }
+ }
+ return -1
+}
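
In source terms, the union rules enforced by parseUnion and parseTilde look like this (illustrative declarations; the quoted diagnostics come from the errorf calls above, and the rejected cases are commented out so the example compiles):

    package p

    type MyInt int

    // Accepted: disjoint terms.
    type Number interface {
        ~int | ~float64
    }

    // Rejected as overlapping: MyInt is already covered by ~int,
    // reported as "overlapping terms MyInt and ~int".
    // type Overlapping interface {
    //     ~int | MyInt
    // }

    // Rejected: the underlying type of MyInt is int, reported as
    // "invalid use of ~ (underlying type of MyInt is int)".
    // type Bad interface {
    //     ~MyInt | string
    // }
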
diff --git a/src/cmd/compile/internal/types2/universe.go b/src/cmd/compile/internal/types2/universe.go
index 76d4e55e84..a615b4c876 100644
--- a/src/cmd/compile/internal/types2/universe.go
+++ b/src/cmd/compile/internal/types2/universe.go
@@ -20,11 +20,12 @@ var Universe *Scope
var Unsafe *Package
var (
- universeIota *Const
- universeByte *Basic // uint8 alias, but has name "byte"
- universeRune *Basic // int32 alias, but has name "rune"
- universeAny *Interface
- universeError *Named
+ universeIota Object
+ universeByte Type // uint8 alias, but has name "byte"
+ universeRune Type // int32 alias, but has name "rune"
+ universeAny Object
+ universeError Type
+ universeComparable Object
)
// Typ contains the predeclared *Basic types indexed by their
@@ -77,20 +78,30 @@ func defPredeclaredTypes() {
def(NewTypeName(nopos, nil, t.name, t))
}
- // any
- // (Predeclared and entered into universe scope so we do all the
- // usual checks; but removed again from scope later since it's
- // only visible as constraint in a type parameter list.)
+ // type any = interface{}
def(NewTypeName(nopos, nil, "any", &emptyInterface))
- // Error has a nil package in its qualified name since it is in no package
+ // type error interface{ Error() string }
{
+ obj := NewTypeName(nopos, nil, "error", nil)
+ obj.setColor(black)
res := NewVar(nopos, nil, "", Typ[String])
- sig := &Signature{results: NewTuple(res)}
+ sig := NewSignature(nil, nil, NewTuple(res), false)
err := NewFunc(nopos, nil, "Error", sig)
- typ := &Named{underlying: NewInterfaceType([]*Func{err}, nil).Complete()}
+ ityp := &Interface{obj, []*Func{err}, nil, nil, true, nil}
+ computeInterfaceTypeSet(nil, nopos, ityp) // prevent races due to lazy computation of tset
+ typ := NewNamed(obj, ityp, nil)
sig.recv = NewVar(nopos, nil, "", typ)
- def(NewTypeName(nopos, nil, "error", typ))
+ def(obj)
+ }
+
+ // type comparable interface{ /* type set marked comparable */ }
+ {
+ obj := NewTypeName(nopos, nil, "comparable", nil)
+ obj.setColor(black)
+ ityp := &Interface{obj, nil, nil, nil, true, &_TypeSet{true, nil, allTermlist}}
+ NewNamed(obj, ityp, nil)
+ def(obj)
}
}
@@ -200,33 +211,6 @@ func DefPredeclaredTestFuncs() {
def(newBuiltin(_Trace))
}
-func defPredeclaredComparable() {
- // The "comparable" interface can be imagined as defined like
- //
- // type comparable interface {
- // == () untyped bool
- // != () untyped bool
- // }
- //
- // == and != cannot be user-declared but we can declare
- // a magic method == and check for its presence when needed.
-
- // Define interface { == () }. We don't care about the signature
- // for == so leave it empty except for the receiver, which is
- // set up later to match the usual interface method assumptions.
- sig := new(Signature)
- eql := NewFunc(nopos, nil, "==", sig)
- iface := NewInterfaceType([]*Func{eql}, nil).Complete()
-
- // set up the defined type for the interface
- obj := NewTypeName(nopos, nil, "comparable", nil)
- named := NewNamed(obj, iface, nil)
- obj.color_ = black
- sig.recv = NewVar(nopos, nil, "", named) // complete == signature
-
- def(obj)
-}
-
func init() {
Universe = NewScope(nil, nopos, nopos, "universe")
Unsafe = NewPackage("unsafe", "unsafe")
@@ -236,16 +220,13 @@ func init() {
defPredeclaredConsts()
defPredeclaredNil()
defPredeclaredFuncs()
- defPredeclaredComparable()
-
- universeIota = Universe.Lookup("iota").(*Const)
- universeByte = Universe.Lookup("byte").(*TypeName).typ.(*Basic)
- universeRune = Universe.Lookup("rune").(*TypeName).typ.(*Basic)
- universeAny = Universe.Lookup("any").(*TypeName).typ.(*Interface)
- universeError = Universe.Lookup("error").(*TypeName).typ.(*Named)
- // "any" is only visible as constraint in a type parameter list
- delete(Universe.elems, "any")
+ universeIota = Universe.Lookup("iota")
+ universeByte = Universe.Lookup("byte").Type()
+ universeRune = Universe.Lookup("rune").Type()
+ universeAny = Universe.Lookup("any")
+ universeError = Universe.Lookup("error").Type()
+ universeComparable = Universe.Lookup("comparable")
}
// Objects with names containing blanks are internal and not entered into
@@ -277,6 +258,6 @@ func def(obj Object) {
}
}
if scope.Insert(obj) != nil {
- panic("internal error: double declaration")
+ panic("double declaration of predeclared identifier")
}
}
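As a quick illustration of what the reworked defPredeclaredTypes sets up, the predeclared identifiers can be inspected through the exported go/types API, which mirrors the types2 universe above. A minimal sketch, assuming a toolchain new enough to predeclare any and comparable (the program itself is illustrative, not part of this change):

package main

import (
	"fmt"
	"go/types"
)

func main() {
	// error, any, and comparable are entered into the universe scope,
	// so they can be looked up like any other predeclared identifier.
	for _, name := range []string{"any", "error", "comparable"} {
		obj := types.Universe.Lookup(name)
		fmt.Printf("%s: %v (underlying %v)\n", name, obj.Type(), obj.Type().Underlying())
	}
}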
diff --git a/src/cmd/compile/internal/walk/assign.go b/src/cmd/compile/internal/walk/assign.go
index 6d697a53ae..d4c1aafdc1 100644
--- a/src/cmd/compile/internal/walk/assign.go
+++ b/src/cmd/compile/internal/walk/assign.go
@@ -429,6 +429,7 @@ func readsMemory(n ir.Node) bool {
ir.OBITNOT,
ir.OCONV,
ir.OCONVIFACE,
+ ir.OCONVIDATA,
ir.OCONVNOP,
ir.ODIV,
ir.ODOT,
diff --git a/src/cmd/compile/internal/walk/builtin.go b/src/cmd/compile/internal/walk/builtin.go
index 14efc05e32..af4f8f4822 100644
--- a/src/cmd/compile/internal/walk/builtin.go
+++ b/src/cmd/compile/internal/walk/builtin.go
@@ -641,16 +641,9 @@ func walkPrint(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
return walkStmt(typecheck.Stmt(r))
}
-// walkRecover walks an ORECOVER node.
-func walkRecover(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
- // Call gorecover with the FP of this frame.
- // FP is equal to caller's SP plus FixedFrameSize().
- var fp ir.Node = mkcall("getcallersp", types.Types[types.TUINTPTR], init)
- if off := base.Ctxt.FixedFrameSize(); off != 0 {
- fp = ir.NewBinaryExpr(fp.Pos(), ir.OADD, fp, ir.NewInt(off))
- }
- fp = ir.NewConvExpr(fp.Pos(), ir.OCONVNOP, types.NewPtr(types.Types[types.TINT32]), fp)
- return mkcall("gorecover", nn.Type(), init, fp)
+// walkRecoverFP walks an ORECOVERFP node.
+func walkRecoverFP(nn *ir.CallExpr, init *ir.Nodes) ir.Node {
+ return mkcall("gorecover", nn.Type(), init, walkExpr(nn.Args[0], init))
}
func walkUnsafeSlice(n *ir.BinaryExpr, init *ir.Nodes) ir.Node {
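At the source level, the ORECOVERFP lowering above still corresponds to the ordinary recover-in-a-deferred-function pattern; only the frame-pointer argument is now materialized earlier and passed explicitly. A hedged sketch (function and variable names are illustrative):

package main

import "fmt"

func safeDiv(a, b int) (q int, err error) {
	defer func() {
		// recover() here is what the compiler lowers to a gorecover call
		// carrying the caller's frame pointer (ORECOVERFP after this change).
		if r := recover(); r != nil {
			err = fmt.Errorf("recovered: %v", r)
		}
	}()
	return a / b, nil
}

func main() {
	fmt.Println(safeDiv(10, 0))
}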
diff --git a/src/cmd/compile/internal/walk/closure.go b/src/cmd/compile/internal/walk/closure.go
index 2194e1c5b0..902e01ef38 100644
--- a/src/cmd/compile/internal/walk/closure.go
+++ b/src/cmd/compile/internal/walk/closure.go
@@ -37,14 +37,6 @@ func directClosureCall(n *ir.CallExpr) {
return // leave for walkClosure to handle
}
- // If wrapGoDefer() in the order phase has flagged this call,
- // avoid eliminating the closure even if there is a direct call to
- // (the closure is needed to simplify the register ABI). See
- // wrapGoDefer for more details.
- if n.PreserveClosure {
- return
- }
-
// We are going to insert captured variables before input args.
var params []*types.Field
var decls []*ir.Name
@@ -122,6 +114,9 @@ func walkClosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ), nil)
clos.SetEsc(clo.Esc())
clos.List = append([]ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, clofn.Nname)}, closureArgs(clo)...)
+ for i, value := range clos.List {
+ clos.List[i] = ir.NewStructKeyExpr(base.Pos, typ.Field(i), value)
+ }
addr := typecheck.NodAddr(clos)
addr.SetEsc(clo.Esc())
@@ -161,7 +156,7 @@ func closureArgs(clo *ir.ClosureExpr) []ir.Node {
return args
}
-func walkCallPart(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
+func walkMethodValue(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
// Create closure in the form of a composite literal.
// For x.M with receiver (x) type T, the generated code looks like:
//
@@ -175,18 +170,16 @@ func walkCallPart(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
n.X = cheapExpr(n.X, init)
n.X = walkExpr(n.X, nil)
- tab := typecheck.Expr(ir.NewUnaryExpr(base.Pos, ir.OITAB, n.X))
-
- c := ir.NewUnaryExpr(base.Pos, ir.OCHECKNIL, tab)
- c.SetTypecheck(1)
- init.Append(c)
+ tab := ir.NewUnaryExpr(base.Pos, ir.OITAB, n.X)
+ check := ir.NewUnaryExpr(base.Pos, ir.OCHECKNIL, tab)
+ init.Append(typecheck.Stmt(check))
}
- typ := typecheck.PartialCallType(n)
+ typ := typecheck.MethodValueType(n)
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ), nil)
clos.SetEsc(n.Esc())
- clos.List = []ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, typecheck.MethodValueWrapper(n).Nname), n.X}
+ clos.List = []ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, methodValueWrapper(n)), n.X}
addr := typecheck.NodAddr(clos)
addr.SetEsc(n.Esc())
@@ -205,3 +198,74 @@ func walkCallPart(n *ir.SelectorExpr, init *ir.Nodes) ir.Node {
return walkExpr(cfn, init)
}
+
+// methodValueWrapper returns the ONAME node representing the
+// wrapper function (*-fm) needed for the given method value. If the
+// wrapper function hasn't been created yet, it is created and
+// added to typecheck.Target.Decls.
+func methodValueWrapper(dot *ir.SelectorExpr) *ir.Name {
+ if dot.Op() != ir.OMETHVALUE {
+ base.Fatalf("methodValueWrapper: unexpected %v (%v)", dot, dot.Op())
+ }
+
+ t0 := dot.Type()
+ meth := dot.Sel
+ rcvrtype := dot.X.Type()
+ sym := ir.MethodSymSuffix(rcvrtype, meth, "-fm")
+
+ if sym.Uniq() {
+ return sym.Def.(*ir.Name)
+ }
+ sym.SetUniq(true)
+
+ savecurfn := ir.CurFunc
+ saveLineNo := base.Pos
+ ir.CurFunc = nil
+
+ // Set line number equal to the line number where the method is declared.
+ if pos := dot.Selection.Pos; pos.IsKnown() {
+ base.Pos = pos
+ }
+ // Note: !dot.Selection.Pos.IsKnown() happens for method expressions where
+ // the method is implicitly declared. The Error method of the
+ // built-in error type is one such method. We leave the line
+ // number at the use of the method expression in this
+ // case. See issue 29389.
+
+ tfn := ir.NewFuncType(base.Pos, nil,
+ typecheck.NewFuncParams(t0.Params(), true),
+ typecheck.NewFuncParams(t0.Results(), false))
+
+ fn := typecheck.DeclFunc(sym, tfn)
+ fn.SetDupok(true)
+ fn.SetWrapper(true)
+
+ // Declare and initialize variable holding receiver.
+ ptr := ir.NewHiddenParam(base.Pos, fn, typecheck.Lookup(".this"), rcvrtype)
+
+ call := ir.NewCallExpr(base.Pos, ir.OCALL, ir.NewSelectorExpr(base.Pos, ir.OXDOT, ptr, meth), nil)
+ call.Args = ir.ParamNames(tfn.Type())
+ call.IsDDD = tfn.Type().IsVariadic()
+
+ var body ir.Node = call
+ if t0.NumResults() != 0 {
+ ret := ir.NewReturnStmt(base.Pos, nil)
+ ret.Results = []ir.Node{call}
+ body = ret
+ }
+
+ fn.Body = []ir.Node{body}
+ typecheck.FinishFuncBody()
+
+ typecheck.Func(fn)
+ // Need to typecheck the body of the just-generated wrapper.
+ // typecheckslice() requires that Curfn is set when processing an ORETURN.
+ ir.CurFunc = fn
+ typecheck.Stmts(fn.Body)
+ sym.Def = fn.Nname
+ typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
+ ir.CurFunc = savecurfn
+ base.Pos = saveLineNo
+
+ return fn.Nname
+}
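For reference, the wrapper that methodValueWrapper emits backs ordinary method values; roughly, taking c.Inc as a value behaves like a closure over the receiver. A small sketch with made-up names:

package main

import "fmt"

type Counter struct{ n int }

func (c *Counter) Inc() { c.n++ }

func main() {
	c := &Counter{}
	inc := c.Inc // method value: the compiler emits a (*Counter).Inc-fm wrapper closing over c
	inc()
	inc()
	fmt.Println(c.n) // 2
}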
diff --git a/src/cmd/compile/internal/walk/complit.go b/src/cmd/compile/internal/walk/complit.go
index abd920d646..e8e941dd91 100644
--- a/src/cmd/compile/internal/walk/complit.go
+++ b/src/cmd/compile/internal/walk/complit.go
@@ -218,11 +218,11 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.CompLitExpr, var_ ir.Node,
case ir.OSTRUCTLIT:
splitnode = func(rn ir.Node) (ir.Node, ir.Node) {
r := rn.(*ir.StructKeyExpr)
- if r.Field.IsBlank() || isBlank {
+ if r.Sym().IsBlank() || isBlank {
return ir.BlankNode, r.Value
}
ir.SetPos(r)
- return ir.NewSelectorExpr(base.Pos, ir.ODOT, var_, r.Field), r.Value
+ return ir.NewSelectorExpr(base.Pos, ir.ODOT, var_, r.Sym()), r.Value
}
default:
base.Fatalf("fixedlit bad op: %v", n.Op())
@@ -440,8 +440,8 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
tk := types.NewArray(n.Type().Key(), int64(len(entries)))
te := types.NewArray(n.Type().Elem(), int64(len(entries)))
- tk.SetNoalg(true)
- te.SetNoalg(true)
+ // TODO(#47904): mark tk and te NoAlg here once the
+ // compiler/linker can handle NoAlg types correctly.
types.CalcSize(tk)
types.CalcSize(te)
diff --git a/src/cmd/compile/internal/walk/convert.go b/src/cmd/compile/internal/walk/convert.go
index 26e17a126f..27a07ce4b6 100644
--- a/src/cmd/compile/internal/walk/convert.go
+++ b/src/cmd/compile/internal/walk/convert.go
@@ -41,46 +41,98 @@ func walkConv(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
// walkConvInterface walks an OCONVIFACE node.
func walkConvInterface(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
+
n.X = walkExpr(n.X, init)
fromType := n.X.Type()
toType := n.Type()
-
- if !fromType.IsInterface() && !ir.IsBlank(ir.CurFunc.Nname) { // skip unnamed functions (func _())
+ if !fromType.IsInterface() && !ir.IsBlank(ir.CurFunc.Nname) {
+ // skip unnamed functions (func _())
reflectdata.MarkTypeUsedInInterface(fromType, ir.CurFunc.LSym)
}
- // typeword generates the type word of the interface value.
- typeword := func() ir.Node {
+ if !fromType.IsInterface() {
+ var typeWord ir.Node
if toType.IsEmptyInterface() {
- return reflectdata.TypePtr(fromType)
+ typeWord = reflectdata.TypePtr(fromType)
+ } else {
+ typeWord = reflectdata.ITabAddr(fromType, toType)
}
- return reflectdata.ITabAddr(fromType, toType)
- }
-
- // Optimize convT2E or convT2I as a two-word copy when T is pointer-shaped.
- if types.IsDirectIface(fromType) {
- l := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeword(), n.X)
+ l := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeWord, dataWord(n.X, init, n.Esc() != ir.EscNone))
l.SetType(toType)
l.SetTypecheck(n.Typecheck())
return l
}
+ if fromType.IsEmptyInterface() {
+ base.Fatalf("OCONVIFACE can't operate on an empty interface")
+ }
- // Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
- // by using an existing addressable value identical to n.Left
- // or creating one on the stack.
+ // Evaluate the input interface.
+ c := typecheck.Temp(fromType)
+ init.Append(ir.NewAssignStmt(base.Pos, c, n.X))
+
+ // Grab its parts.
+ itab := ir.NewUnaryExpr(base.Pos, ir.OITAB, c)
+ itab.SetType(types.Types[types.TUINTPTR].PtrTo())
+ itab.SetTypecheck(1)
+ data := ir.NewUnaryExpr(base.Pos, ir.OIDATA, c)
+ data.SetType(types.Types[types.TUINT8].PtrTo()) // Type is generic pointer - we're just passing it through.
+ data.SetTypecheck(1)
+
+ var typeWord ir.Node
+ if toType.IsEmptyInterface() {
+ // Implement interface to empty interface conversion.
+ // res = itab
+ // if res != nil {
+ // res = res.type
+ // }
+ typeWord = typecheck.Temp(types.NewPtr(types.Types[types.TUINT8]))
+ init.Append(ir.NewAssignStmt(base.Pos, typeWord, itab))
+ nif := ir.NewIfStmt(base.Pos, typecheck.Expr(ir.NewBinaryExpr(base.Pos, ir.ONE, typeWord, typecheck.NodNil())), nil, nil)
+ nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, typeWord, itabType(typeWord))}
+ init.Append(nif)
+ } else {
+ // Must be converting I2I (more specific to less specific interface).
+ // res = convI2I(toType, itab)
+ fn := typecheck.LookupRuntime("convI2I")
+ types.CalcSize(fn.Type())
+ call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
+ call.Args = []ir.Node{reflectdata.TypePtr(toType), itab}
+ typeWord = walkExpr(typecheck.Expr(call), init)
+ }
+
+ // Build the result.
+ // e = iface{typeWord, data}
+ e := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeWord, data)
+ e.SetType(toType) // assign type manually, typecheck doesn't understand OEFACE.
+ e.SetTypecheck(1)
+ return e
+}
+
+// dataWord returns the data word (the second word) used to represent n in an interface.
+// n must not be of interface type.
+// esc describes whether the result escapes.
+func dataWord(n ir.Node, init *ir.Nodes, escapes bool) ir.Node {
+ fromType := n.Type()
+
+ // If it's a pointer, it is its own representation.
+ if types.IsDirectIface(fromType) {
+ return n
+ }
+
+ // Try a bunch of cases to avoid an allocation.
var value ir.Node
switch {
case fromType.Size() == 0:
- // n.Left is zero-sized. Use zerobase.
- cheapExpr(n.X, init) // Evaluate n.Left for side-effects. See issue 19246.
+ // n is zero-sized. Use zerobase.
+ cheapExpr(n, init) // Evaluate n for side-effects. See issue 19246.
value = ir.NewLinksymExpr(base.Pos, ir.Syms.Zerobase, types.Types[types.TUINTPTR])
case fromType.IsBoolean() || (fromType.Size() == 1 && fromType.IsInteger()):
- // n.Left is a bool/byte. Use staticuint64s[n.Left * 8] on little-endian
- // and staticuint64s[n.Left * 8 + 7] on big-endian.
- n.X = cheapExpr(n.X, init)
- // byteindex widens n.Left so that the multiplication doesn't overflow.
- index := ir.NewBinaryExpr(base.Pos, ir.OLSH, byteindex(n.X), ir.NewInt(3))
+ // n is a bool/byte. Use staticuint64s[n * 8] on little-endian
+ // and staticuint64s[n * 8 + 7] on big-endian.
+ n = cheapExpr(n, init)
+ // byteindex widens n so that the multiplication doesn't overflow.
+ index := ir.NewBinaryExpr(base.Pos, ir.OLSH, byteindex(n), ir.NewInt(3))
if ssagen.Arch.LinkArch.ByteOrder == binary.BigEndian {
index = ir.NewBinaryExpr(base.Pos, ir.OADD, index, ir.NewInt(7))
}
@@ -90,118 +142,71 @@ func walkConvInterface(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
xe := ir.NewIndexExpr(base.Pos, staticuint64s, index)
xe.SetBounded(true)
value = xe
- case n.X.Op() == ir.ONAME && n.X.(*ir.Name).Class == ir.PEXTERN && n.X.(*ir.Name).Readonly():
- // n.Left is a readonly global; use it directly.
- value = n.X
- case !fromType.IsInterface() && n.Esc() == ir.EscNone && fromType.Width <= 1024:
- // n.Left does not escape. Use a stack temporary initialized to n.Left.
+ case n.Op() == ir.ONAME && n.(*ir.Name).Class == ir.PEXTERN && n.(*ir.Name).Readonly():
+ // n is a readonly global; use it directly.
+ value = n
+ case !escapes && fromType.Width <= 1024:
+ // n does not escape. Use a stack temporary initialized to n.
value = typecheck.Temp(fromType)
- init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, value, n.X)))
+ init.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, value, n)))
}
-
if value != nil {
- // Value is identical to n.Left.
- // Construct the interface directly: {type/itab, &value}.
- l := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeword(), typecheck.Expr(typecheck.NodAddr(value)))
- l.SetType(toType)
- l.SetTypecheck(n.Typecheck())
- return l
- }
-
- // Implement interface to empty interface conversion.
- // tmp = i.itab
- // if tmp != nil {
- // tmp = tmp.type
- // }
- // e = iface{tmp, i.data}
- if toType.IsEmptyInterface() && fromType.IsInterface() && !fromType.IsEmptyInterface() {
- // Evaluate the input interface.
- c := typecheck.Temp(fromType)
- init.Append(ir.NewAssignStmt(base.Pos, c, n.X))
-
- // Get the itab out of the interface.
- tmp := typecheck.Temp(types.NewPtr(types.Types[types.TUINT8]))
- init.Append(ir.NewAssignStmt(base.Pos, tmp, typecheck.Expr(ir.NewUnaryExpr(base.Pos, ir.OITAB, c))))
-
- // Get the type out of the itab.
- nif := ir.NewIfStmt(base.Pos, typecheck.Expr(ir.NewBinaryExpr(base.Pos, ir.ONE, tmp, typecheck.NodNil())), nil, nil)
- nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, tmp, itabType(tmp))}
- init.Append(nif)
-
- // Build the result.
- e := ir.NewBinaryExpr(base.Pos, ir.OEFACE, tmp, ifaceData(n.Pos(), c, types.NewPtr(types.Types[types.TUINT8])))
- e.SetType(toType) // assign type manually, typecheck doesn't understand OEFACE.
- e.SetTypecheck(1)
- return e
+ // The interface data word is &value.
+ return typecheck.Expr(typecheck.NodAddr(value))
}
- fnname, argType, needsaddr := convFuncName(fromType, toType)
-
- if !needsaddr && !fromType.IsInterface() {
- // Use a specialized conversion routine that only returns a data pointer.
- // ptr = convT2X(val)
- // e = iface{typ/tab, ptr}
- fn := typecheck.LookupRuntime(fnname)
- types.CalcSize(fromType)
+ // Time to do an allocation. We'll call into the runtime for that.
+ fnname, argType, needsaddr := dataWordFuncName(fromType)
+ fn := typecheck.LookupRuntime(fnname)
- arg := n.X
+ var args []ir.Node
+ if needsaddr {
+ // Types of large or unknown size are passed by reference.
+ // Orderexpr arranged for n to be a temporary for all
+ // the conversions it could see. Comparison of an interface
+ // with a non-interface, especially in a switch on interface value
+ // with non-interface cases, is not visible to order.stmt, so we
+ // have to fall back on allocating a temp here.
+ if !ir.IsAddressable(n) {
+ n = copyExpr(n, fromType, init)
+ }
+ fn = typecheck.SubstArgTypes(fn, fromType)
+ args = []ir.Node{reflectdata.TypePtr(fromType), typecheck.NodAddr(n)}
+ } else {
+ // Use a specialized conversion routine that takes the type being
+ // converted by value, not by pointer.
+ var arg ir.Node
switch {
case fromType == argType:
// already in the right type, nothing to do
+ arg = n
case fromType.Kind() == argType.Kind(),
fromType.IsPtrShaped() && argType.IsPtrShaped():
// can directly convert (e.g. named type to underlying type, or one pointer to another)
- arg = ir.NewConvExpr(n.Pos(), ir.OCONVNOP, argType, arg)
+ // TODO: never happens because pointers are directIface?
+ arg = ir.NewConvExpr(n.Pos(), ir.OCONVNOP, argType, n)
case fromType.IsInteger() && argType.IsInteger():
// can directly convert (e.g. int32 to uint32)
- arg = ir.NewConvExpr(n.Pos(), ir.OCONV, argType, arg)
+ arg = ir.NewConvExpr(n.Pos(), ir.OCONV, argType, n)
default:
// unsafe cast through memory
- arg = copyExpr(arg, arg.Type(), init)
+ arg = copyExpr(n, fromType, init)
var addr ir.Node = typecheck.NodAddr(arg)
addr = ir.NewConvExpr(n.Pos(), ir.OCONVNOP, argType.PtrTo(), addr)
arg = ir.NewStarExpr(n.Pos(), addr)
arg.SetType(argType)
}
-
- call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
- call.Args = []ir.Node{arg}
- e := ir.NewBinaryExpr(base.Pos, ir.OEFACE, typeword(), safeExpr(walkExpr(typecheck.Expr(call), init), init))
- e.SetType(toType)
- e.SetTypecheck(1)
- return e
- }
-
- var tab ir.Node
- if fromType.IsInterface() {
- // convI2I
- tab = reflectdata.TypePtr(toType)
- } else {
- // convT2x
- tab = typeword()
+ args = []ir.Node{arg}
}
-
- v := n.X
- if needsaddr {
- // Types of large or unknown size are passed by reference.
- // Orderexpr arranged for n.Left to be a temporary for all
- // the conversions it could see. Comparison of an interface
- // with a non-interface, especially in a switch on interface value
- // with non-interface cases, is not visible to order.stmt, so we
- // have to fall back on allocating a temp here.
- if !ir.IsAddressable(v) {
- v = copyExpr(v, v.Type(), init)
- }
- v = typecheck.NodAddr(v)
- }
-
- types.CalcSize(fromType)
- fn := typecheck.LookupRuntime(fnname)
- fn = typecheck.SubstArgTypes(fn, fromType, toType)
- types.CalcSize(fn.Type())
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
- call.Args = []ir.Node{tab, v}
- return walkExpr(typecheck.Expr(call), init)
+ call.Args = args
+ return safeExpr(walkExpr(typecheck.Expr(call), init), init)
+}
+
+// walkConvIData walks an OCONVIDATA node.
+func walkConvIData(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
+ n.X = walkExpr(n.X, init)
+ return dataWord(n.X, init, n.Esc() != ir.EscNone)
}
// walkBytesRunesToString walks an OBYTES2STR or ORUNES2STR node.
@@ -312,50 +317,35 @@ func walkStringToRunes(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
return mkcall("stringtoslicerune", n.Type(), init, a, typecheck.Conv(n.X, types.Types[types.TSTRING]))
}
-// convFuncName builds the runtime function name for interface conversion.
-// It also returns the argument type that the runtime function takes, and
-// whether the function expects the data by address.
-// Not all names are possible. For example, we never generate convE2E or convE2I.
-func convFuncName(from, to *types.Type) (fnname string, argType *types.Type, needsaddr bool) {
- tkind := to.Tie()
- switch from.Tie() {
- case 'I':
- if tkind == 'I' {
- return "convI2I", types.Types[types.TINTER], false
- }
- case 'T':
+// dataWordFuncName returns the name of the function used to convert a value of type "from"
+// to the data word of an interface.
+// argType is the type the argument needs to be coerced to.
+// needsaddr reports whether the value should be passed directly (needsaddr==false) or by address (needsaddr==true).
+func dataWordFuncName(from *types.Type) (fnname string, argType *types.Type, needsaddr bool) {
+ if from.IsInterface() {
+ base.Fatalf("can only handle non-interfaces")
+ }
+ switch {
+ case from.Size() == 2 && from.Align == 2:
+ return "convT16", types.Types[types.TUINT16], false
+ case from.Size() == 4 && from.Align == 4 && !from.HasPointers():
+ return "convT32", types.Types[types.TUINT32], false
+ case from.Size() == 8 && from.Align == types.Types[types.TUINT64].Align && !from.HasPointers():
+ return "convT64", types.Types[types.TUINT64], false
+ }
+ if sc := from.SoleComponent(); sc != nil {
switch {
- case from.Size() == 2 && from.Align == 2:
- return "convT16", types.Types[types.TUINT16], false
- case from.Size() == 4 && from.Align == 4 && !from.HasPointers():
- return "convT32", types.Types[types.TUINT32], false
- case from.Size() == 8 && from.Align == types.Types[types.TUINT64].Align && !from.HasPointers():
- return "convT64", types.Types[types.TUINT64], false
- }
- if sc := from.SoleComponent(); sc != nil {
- switch {
- case sc.IsString():
- return "convTstring", types.Types[types.TSTRING], false
- case sc.IsSlice():
- return "convTslice", types.NewSlice(types.Types[types.TUINT8]), false // the element type doesn't matter
- }
+ case sc.IsString():
+ return "convTstring", types.Types[types.TSTRING], false
+ case sc.IsSlice():
+ return "convTslice", types.NewSlice(types.Types[types.TUINT8]), false // the element type doesn't matter
}
+ }
- switch tkind {
- case 'E':
- if !from.HasPointers() {
- return "convT2Enoptr", types.Types[types.TUNSAFEPTR], true
- }
- return "convT2E", types.Types[types.TUNSAFEPTR], true
- case 'I':
- if !from.HasPointers() {
- return "convT2Inoptr", types.Types[types.TUNSAFEPTR], true
- }
- return "convT2I", types.Types[types.TUNSAFEPTR], true
- }
+ if from.HasPointers() {
+ return "convT", types.Types[types.TUNSAFEPTR], true
}
- base.Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
- panic("unreachable")
+ return "convTnoptr", types.Types[types.TUNSAFEPTR], true
}
// rtconvfn returns the parameter and result types that will be used by a
@@ -462,7 +452,9 @@ func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
// TODO(mdempsky): Make stricter. We only need to exempt
// reflect.Value.Pointer and reflect.Value.UnsafeAddr.
switch n.X.Op() {
- case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
+ case ir.OCALLMETH:
+ base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck")
+ case ir.OCALLFUNC, ir.OCALLINTER:
return n
}
@@ -499,7 +491,7 @@ func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
cheap := cheapExpr(n, init)
- slice := typecheck.MakeDotArgs(types.NewSlice(types.Types[types.TUNSAFEPTR]), originals)
+ slice := typecheck.MakeDotArgs(base.Pos, types.NewSlice(types.Types[types.TUNSAFEPTR]), originals)
slice.SetEsc(ir.EscNone)
init.Append(mkcall("checkptrArithmetic", nil, init, typecheck.ConvNop(cheap, types.Types[types.TUNSAFEPTR]), slice))
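The dataWord/dataWordFuncName split above decides, per source type, how the second word of an interface value is produced: pointer-shaped values are used directly, small or simple values may hit a specialized convT* helper, and everything else falls back to convT/convTnoptr. A rough source-level sketch; which helper fires for a given type depends on its size, alignment, and whether it contains pointers:

package main

import "fmt"

type pair struct{ a, b int32 } // small, pointer-free value type

func main() {
	var i interface{}

	p := &pair{}
	i = p // pointer-shaped: the pointer itself becomes the data word, no runtime call

	v := pair{1, 2}
	i = v // not pointer-shaped: the data word comes from a runtime convT* helper
	// (which one is chosen by dataWordFuncName based on size, alignment, pointers)

	s := "hello"
	i = s // string-shaped values go through convTstring

	fmt.Println(i)
}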
diff --git a/src/cmd/compile/internal/walk/expr.go b/src/cmd/compile/internal/walk/expr.go
index 2fb907710b..26e225440a 100644
--- a/src/cmd/compile/internal/walk/expr.go
+++ b/src/cmd/compile/internal/walk/expr.go
@@ -82,7 +82,7 @@ func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
base.Fatalf("walkExpr: switch 1 unknown op %+v", n.Op())
panic("unreachable")
- case ir.ONONAME, ir.OGETG:
+ case ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP:
return n
case ir.OTYPE, ir.ONAME, ir.OLITERAL, ir.ONIL, ir.OLINKSYMOFFSET:
@@ -136,6 +136,10 @@ func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
n := n.(*ir.TypeAssertExpr)
return walkDotType(n, init)
+ case ir.ODYNAMICDOTTYPE, ir.ODYNAMICDOTTYPE2:
+ n := n.(*ir.DynamicTypeAssertExpr)
+ return walkDynamicDotType(n, init)
+
case ir.OLEN, ir.OCAP:
n := n.(*ir.UnaryExpr)
return walkLenCap(n, init)
@@ -161,13 +165,13 @@ func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
n := n.(*ir.UnaryExpr)
return mkcall("gopanic", nil, init, n.X)
- case ir.ORECOVER:
- return walkRecover(n.(*ir.CallExpr), init)
+ case ir.ORECOVERFP:
+ return walkRecoverFP(n.(*ir.CallExpr), init)
case ir.OCFUNC:
return n
- case ir.OCALLINTER, ir.OCALLFUNC, ir.OCALLMETH:
+ case ir.OCALLINTER, ir.OCALLFUNC:
n := n.(*ir.CallExpr)
return walkCall(n, init)
@@ -206,6 +210,10 @@ func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
n := n.(*ir.ConvExpr)
return walkConvInterface(n, init)
+ case ir.OCONVIDATA:
+ n := n.(*ir.ConvExpr)
+ return walkConvIData(n, init)
+
case ir.OCONV, ir.OCONVNOP:
n := n.(*ir.ConvExpr)
return walkConv(n, init)
@@ -308,8 +316,8 @@ func walkExpr1(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OCLOSURE:
return walkClosure(n.(*ir.ClosureExpr), init)
- case ir.OCALLPART:
- return walkCallPart(n.(*ir.SelectorExpr), init)
+ case ir.OMETHVALUE:
+ return walkMethodValue(n.(*ir.SelectorExpr), init)
}
// No return! Each case must return (or panic),
@@ -487,9 +495,12 @@ func walkAddString(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
return r1
}
-// walkCall walks an OCALLFUNC, OCALLINTER, or OCALLMETH node.
+// walkCall walks an OCALLFUNC or OCALLINTER node.
func walkCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
- if n.Op() == ir.OCALLINTER || n.Op() == ir.OCALLMETH {
+ if n.Op() == ir.OCALLMETH {
+ base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
+ }
+ if n.Op() == ir.OCALLINTER || n.X.Op() == ir.OMETHEXPR {
// We expect both interface call reflect.Type.Method and concrete
// call reflect.(*rtype).Method.
usemethod(n)
@@ -549,20 +560,8 @@ func walkCall1(n *ir.CallExpr, init *ir.Nodes) {
}
n.SetWalked(true)
- // If this is a method call t.M(...),
- // rewrite into a function call T.M(t, ...).
- // TODO(mdempsky): Do this right after type checking.
if n.Op() == ir.OCALLMETH {
- withRecv := make([]ir.Node, len(n.Args)+1)
- dot := n.X.(*ir.SelectorExpr)
- withRecv[0] = dot.X
- copy(withRecv[1:], n.Args)
- n.Args = withRecv
-
- dot = ir.NewSelectorExpr(dot.Pos(), ir.OXDOT, ir.TypeNode(dot.X.Type()), dot.Selection.Sym)
-
- n.SetOp(ir.OCALLFUNC)
- n.X = typecheck.Expr(dot)
+ base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
}
args := n.Args
@@ -671,6 +670,13 @@ func walkDotType(n *ir.TypeAssertExpr, init *ir.Nodes) ir.Node {
return n
}
+// walkDynamicDotType walks an ODYNAMICDOTTYPE or ODYNAMICDOTTYPE2 node.
+func walkDynamicDotType(n *ir.DynamicTypeAssertExpr, init *ir.Nodes) ir.Node {
+ n.X = walkExpr(n.X, init)
+ n.T = walkExpr(n.T, init)
+ return n
+}
+
// walkIndex walks an OINDEX node.
func walkIndex(n *ir.IndexExpr, init *ir.Nodes) ir.Node {
n.X = walkExpr(n.X, init)
@@ -931,56 +937,55 @@ func bounded(n ir.Node, max int64) bool {
return false
}
-// usemethod checks interface method calls for uses of reflect.Type.Method.
+// usemethod checks calls for uses of reflect.Type.{Method,MethodByName}.
func usemethod(n *ir.CallExpr) {
- t := n.X.Type()
+ // Don't mark reflect.(*rtype).Method, etc. themselves in the reflect package.
+ // Those functions may be alive via the itab, which should not force all
+ // methods to be kept alive. We only want to mark their callers.
+ if base.Ctxt.Pkgpath == "reflect" {
+ switch ir.CurFunc.Nname.Sym().Name { // TODO: is there a better way than hardcoding the names?
+ case "(*rtype).Method", "(*rtype).MethodByName", "(*interfaceType).Method", "(*interfaceType).MethodByName":
+ return
+ }
+ }
- // Looking for either of:
- // Method(int) reflect.Method
- // MethodByName(string) (reflect.Method, bool)
- //
- // TODO(crawshaw): improve precision of match by working out
- // how to check the method name.
- if n := t.NumParams(); n != 1 {
+ dot, ok := n.X.(*ir.SelectorExpr)
+ if !ok {
return
}
- if n := t.NumResults(); n != 1 && n != 2 {
+
+ // Looking for either direct method calls or interface method calls of:
+ // reflect.Type.Method - func(int) reflect.Method
+ // reflect.Type.MethodByName - func(string) (reflect.Method, bool)
+ var pKind types.Kind
+
+ switch dot.Sel.Name {
+ case "Method":
+ pKind = types.TINT
+ case "MethodByName":
+ pKind = types.TSTRING
+ default:
return
}
- p0 := t.Params().Field(0)
- res0 := t.Results().Field(0)
- var res1 *types.Field
- if t.NumResults() == 2 {
- res1 = t.Results().Field(1)
- }
- if res1 == nil {
- if p0.Type.Kind() != types.TINT {
- return
- }
- } else {
- if !p0.Type.IsString() {
- return
- }
- if !res1.Type.IsBoolean() {
- return
- }
+ t := dot.Selection.Type
+ if t.NumParams() != 1 || t.Params().Field(0).Type.Kind() != pKind {
+ return
}
-
- // Don't mark reflect.(*rtype).Method, etc. themselves in the reflect package.
- // Those functions may be alive via the itab, which should not cause all methods
- // alive. We only want to mark their callers.
- if base.Ctxt.Pkgpath == "reflect" {
- switch ir.CurFunc.Nname.Sym().Name { // TODO: is there a better way than hardcoding the names?
- case "(*rtype).Method", "(*rtype).MethodByName", "(*interfaceType).Method", "(*interfaceType).MethodByName":
+ switch t.NumResults() {
+ case 1:
+ // ok
+ case 2:
+ if t.Results().Field(1).Type.Kind() != types.TBOOL {
return
}
+ default:
+ return
}
- // Note: Don't rely on res0.Type.String() since its formatting depends on multiple factors
- // (including global variables such as numImports - was issue #19028).
- // Also need to check for reflect package itself (see Issue #38515).
- if s := res0.Type.Sym(); s != nil && s.Name == "Method" && types.IsReflectPkg(s.Pkg) {
+ // Check that first result type is "reflect.Method". Note that we have to check sym name and sym package
+ // separately, as we can't check for exact string "reflect.Method" reliably (e.g., see #19028 and #38515).
+ if s := t.Results().Field(0).Type.Sym(); s != nil && s.Name == "Method" && types.IsReflectPkg(s.Pkg) {
ir.CurFunc.SetReflectMethod(true)
// The LSym is initialized at this point. We need to set the attribute on the LSym.
ir.CurFunc.LSym.Set(obj.AttrReflectMethod, true)
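usemethod is looking for exactly the kind of call sites below; when it finds one, the enclosing function is flagged ReflectMethod so the linker keeps method metadata reachable from it. A small example of the patterns it matches (type and method names are arbitrary):

package main

import (
	"fmt"
	"reflect"
)

type T struct{}

func (T) Foo() {}

func main() {
	t := reflect.TypeOf(T{})

	// reflect.Type.MethodByName - func(string) (reflect.Method, bool)
	m, ok := t.MethodByName("Foo")
	fmt.Println(m.Name, ok)

	// reflect.Type.Method - func(int) reflect.Method
	fmt.Println(t.Method(0).Name)
}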
diff --git a/src/cmd/compile/internal/walk/order.go b/src/cmd/compile/internal/walk/order.go
index b733d3a29f..6e336f565c 100644
--- a/src/cmd/compile/internal/walk/order.go
+++ b/src/cmd/compile/internal/walk/order.go
@@ -7,10 +7,8 @@ package walk
import (
"fmt"
"go/constant"
- "internal/buildcfg"
"cmd/compile/internal/base"
- "cmd/compile/internal/escape"
"cmd/compile/internal/ir"
"cmd/compile/internal/reflectdata"
"cmd/compile/internal/staticinit"
@@ -53,7 +51,7 @@ import (
type orderState struct {
out []ir.Node // list of generated statements
temp []*ir.Name // stack of temporary variables
- free map[string][]*ir.Name // free list of unused temporaries, by type.LongString().
+ free map[string][]*ir.Name // free list of unused temporaries, by type.LinkString().
edit func(ir.Node) ir.Node // cached closure of o.exprNoLHS
}
@@ -78,20 +76,14 @@ func (o *orderState) append(stmt ir.Node) {
// If clear is true, newTemp emits code to zero the temporary.
func (o *orderState) newTemp(t *types.Type, clear bool) *ir.Name {
var v *ir.Name
- // Note: LongString is close to the type equality we want,
- // but not exactly. We still need to double-check with types.Identical.
- key := t.LongString()
- a := o.free[key]
- for i, n := range a {
- if types.Identical(t, n.Type()) {
- v = a[i]
- a[i] = a[len(a)-1]
- a = a[:len(a)-1]
- o.free[key] = a
- break
+ key := t.LinkString()
+ if a := o.free[key]; len(a) > 0 {
+ v = a[len(a)-1]
+ if !types.Identical(t, v.Type()) {
+ base.Fatalf("expected %L to have type %v", v, t)
}
- }
- if v == nil {
+ o.free[key] = a[:len(a)-1]
+ } else {
v = typecheck.Temp(t)
}
if clear {
@@ -372,7 +364,7 @@ func (o *orderState) markTemp() ordermarker {
// which must have been returned by markTemp.
func (o *orderState) popTemp(mark ordermarker) {
for _, n := range o.temp[mark:] {
- key := n.Type().LongString()
+ key := n.Type().LinkString()
o.free[key] = append(o.free[key], n)
}
o.temp = o.temp[:mark]
@@ -514,15 +506,18 @@ func (o *orderState) init(n ir.Node) {
}
// call orders the call expression n.
-// n.Op is OCALLMETH/OCALLFUNC/OCALLINTER or a builtin like OCOPY.
+// n.Op is OCALLFUNC/OCALLINTER or a builtin like OCOPY.
func (o *orderState) call(nn ir.Node) {
if len(nn.Init()) > 0 {
// Caller should have already called o.init(nn).
base.Fatalf("%v with unexpected ninit", nn.Op())
}
+ if nn.Op() == ir.OCALLMETH {
+ base.FatalfAt(nn.Pos(), "OCALLMETH missed by typecheck")
+ }
// Builtin functions.
- if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLMETH && nn.Op() != ir.OCALLINTER {
+ if nn.Op() != ir.OCALLFUNC && nn.Op() != ir.OCALLINTER {
switch n := nn.(type) {
default:
base.Fatalf("unexpected call: %+v", n)
@@ -554,39 +549,6 @@ func (o *orderState) call(nn ir.Node) {
n.X = o.expr(n.X, nil)
o.exprList(n.Args)
-
- if n.Op() == ir.OCALLINTER {
- return
- }
- keepAlive := func(arg ir.Node) {
- // If the argument is really a pointer being converted to uintptr,
- // arrange for the pointer to be kept alive until the call returns,
- // by copying it into a temp and marking that temp
- // still alive when we pop the temp stack.
- if arg.Op() == ir.OCONVNOP {
- arg := arg.(*ir.ConvExpr)
- if arg.X.Type().IsUnsafePtr() {
- x := o.copyExpr(arg.X)
- arg.X = x
- x.SetAddrtaken(true) // ensure SSA keeps the x variable
- n.KeepAlive = append(n.KeepAlive, x)
- }
- }
- }
-
- // Check for "unsafe-uintptr" tag provided by escape analysis.
- for i, param := range n.X.Type().Params().FieldSlice() {
- if param.Note == escape.UnsafeUintptrNote || param.Note == escape.UintptrEscapesNote {
- if arg := n.Args[i]; arg.Op() == ir.OSLICELIT {
- arg := arg.(*ir.CompLitExpr)
- for _, elt := range arg.List {
- keepAlive(elt)
- }
- } else {
- keepAlive(arg)
- }
- }
- }
}
// mapAssign appends n to o.out.
@@ -693,9 +655,20 @@ func (o *orderState) stmt(n ir.Node) {
n := n.(*ir.AssignListStmt)
t := o.markTemp()
o.exprList(n.Lhs)
- o.init(n.Rhs[0])
- o.call(n.Rhs[0])
- o.as2func(n)
+ call := n.Rhs[0]
+ o.init(call)
+ if ic, ok := call.(*ir.InlinedCallExpr); ok {
+ o.stmtList(ic.Body)
+
+ n.SetOp(ir.OAS2)
+ n.Rhs = ic.ReturnVars
+
+ o.exprList(n.Rhs)
+ o.out = append(o.out, n)
+ } else {
+ o.call(call)
+ o.as2func(n)
+ }
o.cleanTemp(t)
// Special: use temporary variables to hold result,
@@ -713,6 +686,10 @@ func (o *orderState) stmt(n ir.Node) {
case ir.ODOTTYPE2:
r := r.(*ir.TypeAssertExpr)
r.X = o.expr(r.X, nil)
+ case ir.ODYNAMICDOTTYPE2:
+ r := r.(*ir.DynamicTypeAssertExpr)
+ r.X = o.expr(r.X, nil)
+ r.T = o.expr(r.T, nil)
case ir.ORECV:
r := r.(*ir.UnaryExpr)
r.X = o.expr(r.X, nil)
@@ -748,14 +725,25 @@ func (o *orderState) stmt(n ir.Node) {
o.out = append(o.out, n)
// Special: handle call arguments.
- case ir.OCALLFUNC, ir.OCALLINTER, ir.OCALLMETH:
+ case ir.OCALLFUNC, ir.OCALLINTER:
n := n.(*ir.CallExpr)
t := o.markTemp()
o.call(n)
o.out = append(o.out, n)
o.cleanTemp(t)
- case ir.OCLOSE, ir.ORECV:
+ case ir.OINLCALL:
+ n := n.(*ir.InlinedCallExpr)
+ o.stmtList(n.Body)
+
+ // discard results; double-check for no side effects
+ for _, result := range n.ReturnVars {
+ if staticinit.AnySideEffects(result) {
+ base.FatalfAt(result.Pos(), "inlined call result has side effects: %v", result)
+ }
+ }
+
+ case ir.OCHECKNIL, ir.OCLOSE, ir.OPANIC, ir.ORECV:
n := n.(*ir.UnaryExpr)
t := o.markTemp()
n.X = o.expr(n.X, nil)
@@ -770,10 +758,10 @@ func (o *orderState) stmt(n ir.Node) {
o.out = append(o.out, n)
o.cleanTemp(t)
- case ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
+ case ir.OPRINT, ir.OPRINTN, ir.ORECOVERFP:
n := n.(*ir.CallExpr)
t := o.markTemp()
- o.exprList(n.Args)
+ o.call(n)
o.out = append(o.out, n)
o.cleanTemp(t)
@@ -783,16 +771,6 @@ func (o *orderState) stmt(n ir.Node) {
t := o.markTemp()
o.init(n.Call)
o.call(n.Call)
- if n.Call.Op() == ir.ORECOVER {
- // Special handling of "defer recover()". We need to evaluate the FP
- // argument before wrapping.
- var init ir.Nodes
- n.Call = walkRecover(n.Call.(*ir.CallExpr), &init)
- o.stmtList(init)
- }
- if buildcfg.Experiment.RegabiDefer {
- o.wrapGoDefer(n)
- }
o.out = append(o.out, n)
o.cleanTemp(t)
@@ -830,16 +808,6 @@ func (o *orderState) stmt(n ir.Node) {
orderBlock(&n.Else, o.free)
o.out = append(o.out, n)
- case ir.OPANIC:
- n := n.(*ir.UnaryExpr)
- t := o.markTemp()
- n.X = o.expr(n.X, nil)
- if !n.X.Type().IsEmptyInterface() {
- base.FatalfAt(n.Pos(), "bad argument to panic: %L", n.X)
- }
- o.out = append(o.out, n)
- o.cleanTemp(t)
-
case ir.ORANGE:
// n.Right is the expression being ranged over.
// order it, and then make a copy if we need one.
@@ -1192,23 +1160,26 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
// concrete type (not interface) argument might need an addressable
// temporary to pass to the runtime conversion routine.
- case ir.OCONVIFACE:
+ case ir.OCONVIFACE, ir.OCONVIDATA:
n := n.(*ir.ConvExpr)
n.X = o.expr(n.X, nil)
if n.X.Type().IsInterface() {
return n
}
- if _, _, needsaddr := convFuncName(n.X.Type(), n.Type()); needsaddr || isStaticCompositeLiteral(n.X) {
+ if _, _, needsaddr := dataWordFuncName(n.X.Type()); needsaddr || isStaticCompositeLiteral(n.X) {
// Need a temp if we need to pass the address to the conversion function.
// We also process static composite literal node here, making a named static global
- // whose address we can put directly in an interface (see OCONVIFACE case in walk).
+ // whose address we can put directly in an interface (see OCONVIFACE/OCONVIDATA case in walk).
n.X = o.addrTemp(n.X)
}
return n
case ir.OCONVNOP:
n := n.(*ir.ConvExpr)
- if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER || n.X.Op() == ir.OCALLMETH) {
+ if n.X.Op() == ir.OCALLMETH {
+ base.FatalfAt(n.X.Pos(), "OCALLMETH missed by typecheck")
+ }
+ if n.Type().IsKind(types.TUNSAFEPTR) && n.X.Type().IsKind(types.TUINTPTR) && (n.X.Op() == ir.OCALLFUNC || n.X.Op() == ir.OCALLINTER) {
call := n.X.(*ir.CallExpr)
// When reordering unsafe.Pointer(f()) into a separate
// statement, the conversion and function call must stay
@@ -1261,9 +1232,12 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
o.out = append(o.out, nif)
return r
+ case ir.OCALLMETH:
+ base.FatalfAt(n.Pos(), "OCALLMETH missed by typecheck")
+ panic("unreachable")
+
case ir.OCALLFUNC,
ir.OCALLINTER,
- ir.OCALLMETH,
ir.OCAP,
ir.OCOMPLEX,
ir.OCOPY,
@@ -1275,7 +1249,7 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
ir.OMAKESLICECOPY,
ir.ONEW,
ir.OREAL,
- ir.ORECOVER,
+ ir.ORECOVERFP,
ir.OSTR2BYTES,
ir.OSTR2BYTESTMP,
ir.OSTR2RUNES:
@@ -1293,6 +1267,11 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
}
return n
+ case ir.OINLCALL:
+ n := n.(*ir.InlinedCallExpr)
+ o.stmtList(n.Body)
+ return n.SingleResult()
+
case ir.OAPPEND:
// Check for append(x, make([]T, y)...) .
n := n.(*ir.CallExpr)
@@ -1327,11 +1306,11 @@ func (o *orderState) expr1(n, lhs ir.Node) ir.Node {
}
return n
- case ir.OCALLPART:
+ case ir.OMETHVALUE:
n := n.(*ir.SelectorExpr)
n.X = o.expr(n.X, nil)
if n.Transient() {
- t := typecheck.PartialCallType(n)
+ t := typecheck.MethodValueType(n)
n.Prealloc = o.newTemp(t, false)
}
return n
@@ -1498,313 +1477,6 @@ func (o *orderState) as2ok(n *ir.AssignListStmt) {
o.stmt(typecheck.Stmt(as))
}
-var wrapGoDefer_prgen int
-
-// wrapGoDefer wraps the target of a "go" or "defer" statement with a
-// new "function with no arguments" closure. Specifically, it converts
-//
-// defer f(x, y)
-//
-// to
-//
-// x1, y1 := x, y
-// defer func() { f(x1, y1) }()
-//
-// This is primarily to enable a quicker bringup of defers under the
-// new register ABI; by doing this conversion, we can simplify the
-// code in the runtime that invokes defers on the panic path.
-func (o *orderState) wrapGoDefer(n *ir.GoDeferStmt) {
- call := n.Call
-
- var callX ir.Node // thing being called
- var callArgs []ir.Node // call arguments
- var keepAlive []*ir.Name // KeepAlive list from call, if present
-
- // A helper to recreate the call within the closure.
- var mkNewCall func(pos src.XPos, op ir.Op, fun ir.Node, args []ir.Node) ir.Node
-
- // Defer calls come in many shapes and sizes; not all of them
- // are ir.CallExpr's. Examine the type to see what we're dealing with.
- switch x := call.(type) {
- case *ir.CallExpr:
- callX = x.X
- callArgs = x.Args
- keepAlive = x.KeepAlive
- mkNewCall = func(pos src.XPos, op ir.Op, fun ir.Node, args []ir.Node) ir.Node {
- newcall := ir.NewCallExpr(pos, op, fun, args)
- newcall.IsDDD = x.IsDDD
- return ir.Node(newcall)
- }
- case *ir.UnaryExpr: // ex: OCLOSE
- callArgs = []ir.Node{x.X}
- mkNewCall = func(pos src.XPos, op ir.Op, fun ir.Node, args []ir.Node) ir.Node {
- if len(args) != 1 {
- panic("internal error, expecting single arg")
- }
- return ir.Node(ir.NewUnaryExpr(pos, op, args[0]))
- }
- case *ir.BinaryExpr: // ex: OCOPY
- callArgs = []ir.Node{x.X, x.Y}
- mkNewCall = func(pos src.XPos, op ir.Op, fun ir.Node, args []ir.Node) ir.Node {
- if len(args) != 2 {
- panic("internal error, expecting two args")
- }
- return ir.Node(ir.NewBinaryExpr(pos, op, args[0], args[1]))
- }
- default:
- panic("unhandled op")
- }
-
- // No need to wrap if called func has no args, no receiver, and no results.
- // However in the case of "defer func() { ... }()" we need to
- // protect against the possibility of directClosureCall rewriting
- // things so that the call does have arguments.
- //
- // Do wrap method calls (OCALLMETH, OCALLINTER), because it has
- // a receiver.
- //
- // Also do wrap builtin functions, because they may be expanded to
- // calls with arguments (e.g. ORECOVER).
- //
- // TODO: maybe not wrap if the called function has no arguments and
- // only in-register results?
- if len(callArgs) == 0 && call.Op() == ir.OCALLFUNC && callX.Type().NumResults() == 0 {
- if c, ok := call.(*ir.CallExpr); ok && callX != nil && callX.Op() == ir.OCLOSURE {
- cloFunc := callX.(*ir.ClosureExpr).Func
- cloFunc.SetClosureCalled(false)
- c.PreserveClosure = true
- }
- return
- }
-
- if c, ok := call.(*ir.CallExpr); ok {
- // To simplify things, turn f(a, b, []T{c, d, e}...) back
- // into f(a, b, c, d, e) -- when the final call is run through the
- // type checker below, it will rebuild the proper slice literal.
- undoVariadic(c)
- callX = c.X
- callArgs = c.Args
- }
-
- // This is set to true if the closure we're generating escapes
- // (needs heap allocation).
- cloEscapes := func() bool {
- if n.Op() == ir.OGO {
- // For "go", assume that all closures escape.
- return true
- }
- // For defer, just use whatever result escape analysis
- // has determined for the defer.
- return n.Esc() != ir.EscNever
- }()
-
- // A helper for making a copy of an argument. Note that it is
- // not safe to use o.copyExpr(arg) if we're putting a
- // reference to the temp into the closure (as opposed to
- // copying it in by value), since in the by-reference case we
- // need a temporary whose lifetime extends to the end of the
- // function (as opposed to being local to the current block or
- // statement being ordered).
- mkArgCopy := func(arg ir.Node) *ir.Name {
- t := arg.Type()
- byval := t.Size() <= 128 || cloEscapes
- var argCopy *ir.Name
- if byval {
- argCopy = o.copyExpr(arg)
- } else {
- argCopy = typecheck.Temp(t)
- o.append(ir.NewAssignStmt(base.Pos, argCopy, arg))
- }
- // The value of 128 below is meant to be consistent with code
- // in escape analysis that picks byval/byaddr based on size.
- argCopy.SetByval(byval)
- return argCopy
- }
-
- // getUnsafeArg looks for an unsafe.Pointer arg that has been
- // previously captured into the call's keepalive list, returning
- // the name node for it if found.
- getUnsafeArg := func(arg ir.Node) *ir.Name {
- // Look for uintptr(unsafe.Pointer(name))
- if arg.Op() != ir.OCONVNOP {
- return nil
- }
- if !arg.Type().IsUintptr() {
- return nil
- }
- if !arg.(*ir.ConvExpr).X.Type().IsUnsafePtr() {
- return nil
- }
- arg = arg.(*ir.ConvExpr).X
- argname, ok := arg.(*ir.Name)
- if !ok {
- return nil
- }
- for i := range keepAlive {
- if argname == keepAlive[i] {
- return argname
- }
- }
- return nil
- }
-
- // Copy the arguments to the function into temps.
- //
- // For calls with uintptr(unsafe.Pointer(...)) args that are being
- // kept alive (see code in (*orderState).call that does this), use
- // the existing arg copy instead of creating a new copy.
- unsafeArgs := make([]*ir.Name, len(callArgs))
- origArgs := callArgs
- var newNames []*ir.Name
- for i := range callArgs {
- arg := callArgs[i]
- var argname *ir.Name
- unsafeArgName := getUnsafeArg(arg)
- if unsafeArgName != nil {
- // arg has been copied already, use keepalive copy
- argname = unsafeArgName
- unsafeArgs[i] = unsafeArgName
- } else {
- argname = mkArgCopy(arg)
- }
- newNames = append(newNames, argname)
- }
-
- // Deal with cases where the function expression (what we're
- // calling) is not a simple function symbol.
- var fnExpr *ir.Name
- var methSelectorExpr *ir.SelectorExpr
- if callX != nil {
- switch {
- case callX.Op() == ir.ODOTMETH || callX.Op() == ir.ODOTINTER:
- // Handle defer of a method call, e.g. "defer v.MyMethod(x, y)"
- n := callX.(*ir.SelectorExpr)
- n.X = mkArgCopy(n.X)
- methSelectorExpr = n
- if callX.Op() == ir.ODOTINTER {
- // Currently for "defer i.M()" if i is nil it panics at the
- // point of defer statement, not when deferred function is called.
- // (I think there is an issue discussing what is the intended
- // behavior but I cannot find it.)
- // We need to do the nil check outside of the wrapper.
- tab := typecheck.Expr(ir.NewUnaryExpr(base.Pos, ir.OITAB, n.X))
- c := ir.NewUnaryExpr(n.Pos(), ir.OCHECKNIL, tab)
- c.SetTypecheck(1)
- o.append(c)
- }
- case !(callX.Op() == ir.ONAME && callX.(*ir.Name).Class == ir.PFUNC):
- // Deal with "defer returnsafunc()(x, y)" (for
- // example) by copying the callee expression.
- fnExpr = mkArgCopy(callX)
- if callX.Op() == ir.OCLOSURE {
- // For "defer func(...)", in addition to copying the
- // closure into a temp, mark it as no longer directly
- // called.
- callX.(*ir.ClosureExpr).Func.SetClosureCalled(false)
- }
- }
- }
-
- // Create a new no-argument function that we'll hand off to defer.
- var noFuncArgs []*ir.Field
- noargst := ir.NewFuncType(base.Pos, nil, noFuncArgs, nil)
- wrapGoDefer_prgen++
- outerfn := ir.CurFunc
- wrapname := fmt.Sprintf("%v·dwrap·%d", outerfn, wrapGoDefer_prgen)
- sym := types.LocalPkg.Lookup(wrapname)
- fn := typecheck.DeclFunc(sym, noargst)
- fn.SetIsHiddenClosure(true)
- fn.SetWrapper(true)
-
- // helper for capturing reference to a var declared in an outer scope.
- capName := func(pos src.XPos, fn *ir.Func, n *ir.Name) *ir.Name {
- t := n.Type()
- cv := ir.CaptureName(pos, fn, n)
- cv.SetType(t)
- return typecheck.Expr(cv).(*ir.Name)
- }
-
- // Call args (x1, y1) need to be captured as part of the newly
- // created closure.
- newCallArgs := []ir.Node{}
- for i := range newNames {
- var arg ir.Node
- arg = capName(callArgs[i].Pos(), fn, newNames[i])
- if unsafeArgs[i] != nil {
- arg = ir.NewConvExpr(arg.Pos(), origArgs[i].Op(), origArgs[i].Type(), arg)
- }
- newCallArgs = append(newCallArgs, arg)
- }
- // Also capture the function or method expression (if needed) into
- // the closure.
- if fnExpr != nil {
- callX = capName(callX.Pos(), fn, fnExpr)
- }
- if methSelectorExpr != nil {
- methSelectorExpr.X = capName(callX.Pos(), fn, methSelectorExpr.X.(*ir.Name))
- }
- ir.FinishCaptureNames(n.Pos(), outerfn, fn)
-
- // This flags a builtin as opposed to a regular call.
- irregular := (call.Op() != ir.OCALLFUNC &&
- call.Op() != ir.OCALLMETH &&
- call.Op() != ir.OCALLINTER)
-
- // Construct new function body: f(x1, y1)
- op := ir.OCALL
- if irregular {
- op = call.Op()
- }
- newcall := mkNewCall(call.Pos(), op, callX, newCallArgs)
-
- // Type-check the result.
- if !irregular {
- typecheck.Call(newcall.(*ir.CallExpr))
- } else {
- typecheck.Stmt(newcall)
- }
-
- // Finalize body, register function on the main decls list.
- fn.Body = []ir.Node{newcall}
- typecheck.FinishFuncBody()
- typecheck.Func(fn)
- typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
-
- // Create closure expr
- clo := ir.NewClosureExpr(n.Pos(), fn)
- fn.OClosure = clo
- clo.SetType(fn.Type())
-
- // Set escape properties for closure.
- if n.Op() == ir.OGO {
- // For "go", assume that the closure is going to escape
- // (with an exception for the runtime, which doesn't
- // permit heap-allocated closures).
- if base.Ctxt.Pkgpath != "runtime" {
- clo.SetEsc(ir.EscHeap)
- }
- } else {
- // For defer, just use whatever result escape analysis
- // has determined for the defer.
- if n.Esc() == ir.EscNever {
- clo.SetTransient(true)
- clo.SetEsc(ir.EscNone)
- }
- }
-
- // Create new top level call to closure over argless function.
- topcall := ir.NewCallExpr(n.Pos(), ir.OCALL, clo, []ir.Node{})
- typecheck.Call(topcall)
-
- // Tag the call to insure that directClosureCall doesn't undo our work.
- topcall.PreserveClosure = true
-
- fn.SetClosureCalled(false)
-
- // Finally, point the defer statement at the newly generated call.
- n.Call = topcall
-}
-
// isFuncPCIntrinsic returns whether n is a direct call of internal/abi.FuncPCABIxxx functions.
func isFuncPCIntrinsic(n *ir.CallExpr) bool {
if n.Op() != ir.OCALLFUNC || n.X.Op() != ir.ONAME {
diff --git a/src/cmd/compile/internal/walk/stmt.go b/src/cmd/compile/internal/walk/stmt.go
index 0bf76680c4..4581bca3df 100644
--- a/src/cmd/compile/internal/walk/stmt.go
+++ b/src/cmd/compile/internal/walk/stmt.go
@@ -7,7 +7,6 @@ package walk
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
- "cmd/compile/internal/typecheck"
)
// The result of walkStmt MUST be assigned back to n, e.g.
@@ -41,7 +40,6 @@ func walkStmt(n ir.Node) ir.Node {
ir.OAS2MAPR,
ir.OCLOSE,
ir.OCOPY,
- ir.OCALLMETH,
ir.OCALLINTER,
ir.OCALL,
ir.OCALLFUNC,
@@ -50,7 +48,7 @@ func walkStmt(n ir.Node) ir.Node {
ir.OPRINT,
ir.OPRINTN,
ir.OPANIC,
- ir.ORECOVER,
+ ir.ORECOVERFP,
ir.OGETG:
if n.Typecheck() == 0 {
base.Fatalf("missing typecheck: %+v", n)
@@ -187,33 +185,28 @@ func walkFor(n *ir.ForStmt) ir.Node {
return n
}
+// validGoDeferCall reports whether call is a valid call to appear in
+// a go or defer statement; that is, whether it's a regular function
+// call without arguments or results.
+func validGoDeferCall(call ir.Node) bool {
+ if call, ok := call.(*ir.CallExpr); ok && call.Op() == ir.OCALLFUNC && len(call.KeepAlive) == 0 {
+ sig := call.X.Type()
+ return sig.NumParams()+sig.NumResults() == 0
+ }
+ return false
+}
+
// walkGoDefer walks an OGO or ODEFER node.
func walkGoDefer(n *ir.GoDeferStmt) ir.Node {
- var init ir.Nodes
- switch call := n.Call; call.Op() {
- case ir.OPRINT, ir.OPRINTN:
- call := call.(*ir.CallExpr)
- n.Call = wrapCall(call, &init)
-
- case ir.ODELETE:
- call := call.(*ir.CallExpr)
- n.Call = wrapCall(call, &init)
+ if !validGoDeferCall(n.Call) {
+ base.FatalfAt(n.Pos(), "invalid %v call: %v", n.Op(), n.Call)
+ }
- case ir.OCOPY:
- call := call.(*ir.BinaryExpr)
- n.Call = walkCopy(call, &init, true)
+ var init ir.Nodes
- case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
- call := call.(*ir.CallExpr)
- if len(call.KeepAlive) > 0 {
- n.Call = wrapCall(call, &init)
- } else {
- n.Call = walkExpr(call, &init)
- }
+ call := n.Call.(*ir.CallExpr)
+ call.X = walkExpr(call.X, &init)
- default:
- n.Call = walkExpr(call, &init)
- }
if len(init) > 0 {
init.Append(n)
return ir.NewBlockStmt(n.Pos(), init)
@@ -228,110 +221,3 @@ func walkIf(n *ir.IfStmt) ir.Node {
walkStmtList(n.Else)
return n
}
-
-// Rewrite
-// go builtin(x, y, z)
-// into
-// go func(a1, a2, a3) {
-// builtin(a1, a2, a3)
-// }(x, y, z)
-// for print, println, and delete.
-//
-// Rewrite
-// go f(x, y, uintptr(unsafe.Pointer(z)))
-// into
-// go func(a1, a2, a3) {
-// f(a1, a2, uintptr(a3))
-// }(x, y, unsafe.Pointer(z))
-// for function contains unsafe-uintptr arguments.
-
-var wrapCall_prgen int
-
-// The result of wrapCall MUST be assigned back to n, e.g.
-// n.Left = wrapCall(n.Left, init)
-func wrapCall(n *ir.CallExpr, init *ir.Nodes) ir.Node {
- if len(n.Init()) != 0 {
- walkStmtList(n.Init())
- init.Append(ir.TakeInit(n)...)
- }
-
- isBuiltinCall := n.Op() != ir.OCALLFUNC && n.Op() != ir.OCALLMETH && n.Op() != ir.OCALLINTER
-
- // Turn f(a, b, []T{c, d, e}...) back into f(a, b, c, d, e).
- if !isBuiltinCall && n.IsDDD {
- undoVariadic(n)
- }
-
- wrapArgs := n.Args
- // If there's a receiver argument, it needs to be passed through the wrapper too.
- if n.Op() == ir.OCALLMETH || n.Op() == ir.OCALLINTER {
- recv := n.X.(*ir.SelectorExpr).X
- wrapArgs = append([]ir.Node{recv}, wrapArgs...)
- }
-
- // origArgs keeps track of what argument is uintptr-unsafe/unsafe-uintptr conversion.
- origArgs := make([]ir.Node, len(wrapArgs))
- var funcArgs []*ir.Field
- for i, arg := range wrapArgs {
- s := typecheck.LookupNum("a", i)
- if !isBuiltinCall && arg.Op() == ir.OCONVNOP && arg.Type().IsUintptr() && arg.(*ir.ConvExpr).X.Type().IsUnsafePtr() {
- origArgs[i] = arg
- arg = arg.(*ir.ConvExpr).X
- wrapArgs[i] = arg
- }
- funcArgs = append(funcArgs, ir.NewField(base.Pos, s, nil, arg.Type()))
- }
- t := ir.NewFuncType(base.Pos, nil, funcArgs, nil)
-
- wrapCall_prgen++
- sym := typecheck.LookupNum("wrap·", wrapCall_prgen)
- fn := typecheck.DeclFunc(sym, t)
-
- args := ir.ParamNames(t.Type())
- for i, origArg := range origArgs {
- if origArg == nil {
- continue
- }
- args[i] = ir.NewConvExpr(base.Pos, origArg.Op(), origArg.Type(), args[i])
- }
- if n.Op() == ir.OCALLMETH || n.Op() == ir.OCALLINTER {
- // Move wrapped receiver argument back to its appropriate place.
- recv := typecheck.Expr(args[0])
- n.X.(*ir.SelectorExpr).X = recv
- args = args[1:]
- }
- call := ir.NewCallExpr(base.Pos, n.Op(), n.X, args)
- if !isBuiltinCall {
- call.SetOp(ir.OCALL)
- call.IsDDD = n.IsDDD
- }
- fn.Body = []ir.Node{call}
-
- typecheck.FinishFuncBody()
-
- typecheck.Func(fn)
- typecheck.Stmts(fn.Body)
- typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
-
- call = ir.NewCallExpr(base.Pos, ir.OCALL, fn.Nname, wrapArgs)
- return walkExpr(typecheck.Stmt(call), init)
-}
-
-// undoVariadic turns a call to a variadic function of the form
-//
-// f(a, b, []T{c, d, e}...)
-//
-// back into
-//
-// f(a, b, c, d, e)
-//
-func undoVariadic(call *ir.CallExpr) {
- if call.IsDDD {
- last := len(call.Args) - 1
- if va := call.Args[last]; va.Op() == ir.OSLICELIT {
- va := va.(*ir.CompLitExpr)
- call.Args = append(call.Args[:last], va.List...)
- call.IsDDD = false
- }
- }
-}
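With wrapCall and wrapGoDefer gone from walk and order, the invariant checked by validGoDeferCall is that every go/defer target arriving here is already an argless, resultless OCALLFUNC; the wrapping now happens in earlier phases. Conceptually, the rewrite those phases perform looks roughly like this (a sketch, not the compiler's literal output):

package main

import "fmt"

func f(x, y int) { fmt.Println(x + y) }

func main() {
	x, y := 1, 2

	// What the programmer writes:
	defer f(x, y)

	// Roughly what reaches walkGoDefer after earlier rewriting
	// (argument values captured up front, an argless call deferred):
	x1, y1 := x, y
	defer func() { f(x1, y1) }()
}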
diff --git a/src/cmd/compile/internal/walk/switch.go b/src/cmd/compile/internal/walk/switch.go
index 162de018f6..3705c5b192 100644
--- a/src/cmd/compile/internal/walk/switch.go
+++ b/src/cmd/compile/internal/walk/switch.go
@@ -360,10 +360,10 @@ func walkSwitchType(sw *ir.SwitchStmt) {
}
if singleType != nil && singleType.IsInterface() {
- s.Add(ncase.Pos(), n1.Type(), caseVar, jmp)
+ s.Add(ncase.Pos(), n1, caseVar, jmp)
caseVarInitialized = true
} else {
- s.Add(ncase.Pos(), n1.Type(), nil, jmp)
+ s.Add(ncase.Pos(), n1, nil, jmp)
}
}
@@ -377,6 +377,17 @@ func walkSwitchType(sw *ir.SwitchStmt) {
}
val = ifaceData(ncase.Pos(), s.facename, singleType)
}
+ if len(ncase.List) == 1 && ncase.List[0].Op() == ir.ODYNAMICTYPE {
+ dt := ncase.List[0].(*ir.DynamicType)
+ x := ir.NewDynamicTypeAssertExpr(ncase.Pos(), ir.ODYNAMICDOTTYPE, val, dt.X)
+ if dt.ITab != nil {
+ // TODO: make ITab a separate field in DynamicTypeAssertExpr?
+ x.T = dt.ITab
+ }
+ x.SetType(caseVar.Type())
+ x.SetTypecheck(1)
+ val = x
+ }
l := []ir.Node{
ir.NewDecl(ncase.Pos(), ir.ODCL, caseVar),
ir.NewAssignStmt(ncase.Pos(), caseVar, val),
@@ -446,7 +457,8 @@ type typeClause struct {
body ir.Nodes
}
-func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar *ir.Name, jmp ir.Node) {
+func (s *typeSwitch) Add(pos src.XPos, n1 ir.Node, caseVar *ir.Name, jmp ir.Node) {
+ typ := n1.Type()
var body ir.Nodes
if caseVar != nil {
l := []ir.Node{
@@ -462,9 +474,25 @@ func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar *ir.Name, jmp ir
// cv, ok = iface.(type)
as := ir.NewAssignListStmt(pos, ir.OAS2, nil, nil)
as.Lhs = []ir.Node{caseVar, s.okname} // cv, ok =
- dot := ir.NewTypeAssertExpr(pos, s.facename, nil)
- dot.SetType(typ) // iface.(type)
- as.Rhs = []ir.Node{dot}
+ switch n1.Op() {
+ case ir.OTYPE:
+ // Static type assertion (non-generic)
+ dot := ir.NewTypeAssertExpr(pos, s.facename, nil)
+ dot.SetType(typ) // iface.(type)
+ as.Rhs = []ir.Node{dot}
+ case ir.ODYNAMICTYPE:
+ // Dynamic type assertion (generic)
+ dt := n1.(*ir.DynamicType)
+ dot := ir.NewDynamicTypeAssertExpr(pos, ir.ODYNAMICDOTTYPE, s.facename, dt.X)
+ if dt.ITab != nil {
+ dot.T = dt.ITab
+ }
+ dot.SetType(typ)
+ dot.SetTypecheck(1)
+ as.Rhs = []ir.Node{dot}
+ default:
+ base.Fatalf("unhandled type case %s", n1.Op())
+ }
appendWalkStmt(&body, as)
// if ok { goto label }
@@ -473,9 +501,10 @@ func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar *ir.Name, jmp ir
nif.Body = []ir.Node{jmp}
body.Append(nif)
- if !typ.IsInterface() {
+ if n1.Op() == ir.OTYPE && !typ.IsInterface() {
+ // Defer static, noninterface cases so they can be binary searched by hash.
s.clauses = append(s.clauses, typeClause{
- hash: types.TypeHash(typ),
+ hash: types.TypeHash(n1.Type()),
body: body,
})
return
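The ODYNAMICTYPE/ODYNAMICDOTTYPE handling above is what makes type switches involving type parameters work under the generics implementation: a case whose type depends on a type parameter cannot be hashed statically, so it is matched with a dynamic type assertion instead. A minimal example of such a case (isSliceOf is an illustrative name):

package main

import "fmt"

// isSliceOf reports whether x holds a []T. The case type depends on the
// type parameter, so under stenciling it lowers to an ODYNAMICTYPE case.
func isSliceOf[T any](x any) bool {
	switch x.(type) {
	case []T:
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(isSliceOf[int]([]int{1, 2, 3})) // true
	fmt.Println(isSliceOf[int]("not a slice"))  // false
}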
diff --git a/src/cmd/compile/internal/walk/walk.go b/src/cmd/compile/internal/walk/walk.go
index 26da6e3145..a9bbc9a54f 100644
--- a/src/cmd/compile/internal/walk/walk.go
+++ b/src/cmd/compile/internal/walk/walk.go
@@ -113,8 +113,7 @@ func vmkcall(fn ir.Node, t *types.Type, init *ir.Nodes, va []ir.Node) *ir.CallEx
base.Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va))
}
- call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, va)
- typecheck.Call(call)
+ call := typecheck.Call(base.Pos, fn, va, false).(*ir.CallExpr)
call.SetType(t)
return walkExpr(call, init).(*ir.CallExpr)
}
@@ -308,12 +307,12 @@ func mayCall(n ir.Node) bool {
default:
base.FatalfAt(n.Pos(), "mayCall %+v", n)
- case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER,
+ case ir.OCALLFUNC, ir.OCALLINTER,
ir.OUNSAFEADD, ir.OUNSAFESLICE:
return true
case ir.OINDEX, ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR,
- ir.ODEREF, ir.ODOTPTR, ir.ODOTTYPE, ir.ODIV, ir.OMOD, ir.OSLICE2ARRPTR:
+ ir.ODEREF, ir.ODOTPTR, ir.ODOTTYPE, ir.ODYNAMICDOTTYPE, ir.ODIV, ir.OMOD, ir.OSLICE2ARRPTR:
// These ops might panic, make sure they are done
// before we start marshaling args for a call. See issue 16760.
return true
@@ -343,7 +342,7 @@ func mayCall(n ir.Node) bool {
ir.OCAP, ir.OIMAG, ir.OLEN, ir.OREAL,
ir.OCONVNOP, ir.ODOT,
ir.OCFUNC, ir.OIDATA, ir.OITAB, ir.OSPTR,
- ir.OBYTES2STRTMP, ir.OGETG, ir.OSLICEHEADER:
+ ir.OBYTES2STRTMP, ir.OGETG, ir.OGETCALLERPC, ir.OGETCALLERSP, ir.OSLICEHEADER:
// ok: operations that don't require function calls.
// Expand as needed.
}
diff --git a/src/cmd/compile/internal/wasm/ssa.go b/src/cmd/compile/internal/wasm/ssa.go
index 31b09016eb..0b2ca3fdbb 100644
--- a/src/cmd/compile/internal/wasm/ssa.go
+++ b/src/cmd/compile/internal/wasm/ssa.go
@@ -24,7 +24,6 @@ func Init(arch *ssagen.ArchInfo) {
arch.ZeroRange = zeroRange
arch.Ginsnop = ginsnop
- arch.Ginsnopdefer = ginsnop
arch.SSAMarkMoves = ssaMarkMoves
arch.SSAGenValue = ssaGenValue
@@ -126,7 +125,11 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall:
s.PrepareCall(v)
if call, ok := v.Aux.(*ssa.AuxCall); ok && call.Fn == ir.Syms.Deferreturn {
- // add a resume point before call to deferreturn so it can be called again via jmpdefer
+ // The runtime needs to inject jumps to
+ // deferreturn calls using the address in
+ // _func.deferreturn. Hence, the call to
+ // deferreturn must itself be a resumption
+ // point so it gets a target PC.
s.Prog(wasm.ARESUMEPOINT)
}
if v.Op == ssa.OpWasmLoweredClosureCall {
diff --git a/src/cmd/compile/internal/x86/galign.go b/src/cmd/compile/internal/x86/galign.go
index 00a20e429f..5565bd32c7 100644
--- a/src/cmd/compile/internal/x86/galign.go
+++ b/src/cmd/compile/internal/x86/galign.go
@@ -34,7 +34,6 @@ func Init(arch *ssagen.ArchInfo) {
arch.ZeroRange = zerorange
arch.Ginsnop = ginsnop
- arch.Ginsnopdefer = ginsnop
arch.SSAMarkMoves = ssaMarkMoves
}
diff --git a/src/cmd/dist/build.go b/src/cmd/dist/build.go
index 1abb03bcc5..33a329e48b 100644
--- a/src/cmd/dist/build.go
+++ b/src/cmd/dist/build.go
@@ -48,8 +48,6 @@ var (
exe string
defaultcc map[string]string
defaultcxx map[string]string
- defaultcflags string
- defaultldflags string
defaultpkgconfig string
defaultldso string
@@ -209,9 +207,6 @@ func xinit() {
defaultcc = compilerEnv("CC", cc)
defaultcxx = compilerEnv("CXX", cxx)
- defaultcflags = os.Getenv("CFLAGS")
- defaultldflags = os.Getenv("LDFLAGS")
-
b = os.Getenv("PKG_CONFIG")
if b == "" {
b = "pkg-config"
@@ -1263,14 +1258,19 @@ func cmdbootstrap() {
timelog("start", "dist bootstrap")
defer timelog("end", "dist bootstrap")
- var noBanner bool
+ var noBanner, noClean bool
var debug bool
flag.BoolVar(&rebuildall, "a", rebuildall, "rebuild all")
flag.BoolVar(&debug, "d", debug, "enable debugging of bootstrap process")
flag.BoolVar(&noBanner, "no-banner", noBanner, "do not print banner")
+ flag.BoolVar(&noClean, "no-clean", noClean, "print deprecation warning")
xflagparse(0)
+ if noClean {
+ xprintf("warning: --no-clean is deprecated and has no effect; use 'go install std cmd' instead\n")
+ }
+
// Set GOPATH to an internal directory. We shouldn't actually
// need to store files here, since the toolchain won't
// depend on modules outside of vendor directories, but if
diff --git a/src/cmd/dist/test.go b/src/cmd/dist/test.go
index f40fa926df..a104b5c8f3 100644
--- a/src/cmd/dist/test.go
+++ b/src/cmd/dist/test.go
@@ -491,19 +491,6 @@ func (t *tester) registerTests() {
})
}
- // Test go/... cmd/gofmt with type parameters enabled.
- if !t.compileOnly {
- t.tests = append(t.tests, distTest{
- name: "tyepparams",
- heading: "go/... and cmd/gofmt tests with tag typeparams",
- fn: func(dt *distTest) error {
- t.addCmd(dt, "src", t.goTest(), t.timeout(300), "-tags=typeparams", "go/...")
- t.addCmd(dt, "src", t.goTest(), t.timeout(300), "-tags=typeparams", "cmd/gofmt")
- return nil
- },
- })
- }
-
if t.iOS() && !t.compileOnly {
t.tests = append(t.tests, distTest{
name: "x509omitbundledroots",
diff --git a/src/cmd/go.mod b/src/cmd/go.mod
index b0a3f48045..b12d1991b9 100644
--- a/src/cmd/go.mod
+++ b/src/cmd/go.mod
@@ -1,6 +1,6 @@
module cmd
-go 1.17
+go 1.18
require (
github.com/google/pprof v0.0.0-20210506205249-923b5ab0fc1a
@@ -10,6 +10,6 @@ require (
golang.org/x/mod v0.4.3-0.20210723200715-e41a6a4f3b61
golang.org/x/sys v0.0.0-20210511113859-b0526f3d8744 // indirect
golang.org/x/term v0.0.0-20210503060354-a79de5458b56
- golang.org/x/tools v0.1.6-0.20210726171848-ebce39e5e3d6
+ golang.org/x/tools v0.1.6-0.20210809225032-337cebd2c151
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
)
diff --git a/src/cmd/go.sum b/src/cmd/go.sum
index bf237d40bc..1db50ca302 100644
--- a/src/cmd/go.sum
+++ b/src/cmd/go.sum
@@ -5,41 +5,18 @@ github.com/google/pprof v0.0.0-20210506205249-923b5ab0fc1a h1:jmAp/2PZAScNd62lTD
github.com/google/pprof v0.0.0-20210506205249-923b5ab0fc1a/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639 h1:mV02weKRL81bEnm8A0HT1/CAelMQDBuQIfLw8n+d6xI=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
-github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
golang.org/x/arch v0.0.0-20210502124803-cbf565b21d1e h1:pv3V0NlNSh5Q6AX/StwGLBjcLS7UN4m4Gq+V+uSecqM=
golang.org/x/arch v0.0.0-20210502124803-cbf565b21d1e/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
-golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20210503195802-e9a32991a82e h1:8foAy0aoO5GkqCvAEJ4VC4P3zksTg4X4aJCDpZzmgQI=
golang.org/x/crypto v0.0.0-20210503195802-e9a32991a82e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
-golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.3-0.20210723200715-e41a6a4f3b61 h1:gQY3CVezomIImcWCpxp6Mhj+fXCOZ+gD8/88326LVqw=
golang.org/x/mod v0.4.3-0.20210723200715-e41a6a4f3b61/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
-golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
-golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
-golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210511113859-b0526f3d8744 h1:yhBbb4IRs2HS9PPlAg6DMC6mUOKexJBNsLf4Z+6En1Q=
golang.org/x/sys v0.0.0-20210511113859-b0526f3d8744/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210503060354-a79de5458b56 h1:b8jxX3zqjpqb2LklXPzKSGJhzyxCOZSz8ncv8Nv+y7w=
golang.org/x/term v0.0.0-20210503060354-a79de5458b56/go.mod h1:tfny5GFUkzUvx4ps4ajbZsCe5lw1metzhBm9T3x7oIY=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.1.6-0.20210726171848-ebce39e5e3d6 h1:er++nfKy5Irv8JPDfJ68QzoVKQ6MBF7cf5xC15O4Zy0=
-golang.org/x/tools v0.1.6-0.20210726171848-ebce39e5e3d6/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
-golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/tools v0.1.6-0.20210809225032-337cebd2c151 h1:jHjT6WuVKEMzjJgrS1+r1wk54oxwqumUnvtn0QZXyXE=
+golang.org/x/tools v0.1.6-0.20210809225032-337cebd2c151/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
diff --git a/src/cmd/go/alldocs.go b/src/cmd/go/alldocs.go
index e0973acbf4..425aa831d8 100644
--- a/src/cmd/go/alldocs.go
+++ b/src/cmd/go/alldocs.go
@@ -1503,7 +1503,8 @@
// used. That subset is: 'atomic', 'bool', 'buildtags', 'errorsas',
// 'ifaceassert', 'nilfunc', 'printf', and 'stringintconv'. You can see
// the documentation for these and other vet tests via "go doc cmd/vet".
-// To disable the running of go vet, use the -vet=off flag.
+// To disable the running of go vet, use the -vet=off flag. To run all
+// checks, use the -vet=all flag.
//
// All test output and summary lines are printed to the go command's
// standard output, even if the test printed them to its own standard
@@ -1544,16 +1545,16 @@
// The rule for a match in the cache is that the run involves the same
// test binary and the flags on the command line come entirely from a
// restricted set of 'cacheable' test flags, defined as -benchtime, -cpu,
-// -list, -parallel, -run, -short, and -v. If a run of go test has any test
-// or non-test flags outside this set, the result is not cached. To
-// disable test caching, use any test flag or argument other than the
-// cacheable flags. The idiomatic way to disable test caching explicitly
-// is to use -count=1. Tests that open files within the package's source
-// root (usually $GOPATH) or that consult environment variables only
-// match future runs in which the files and environment variables are unchanged.
-// A cached test result is treated as executing in no time at all,
-// so a successful package test result will be cached and reused
-// regardless of -timeout setting.
+// -list, -parallel, -run, -short, -timeout, -failfast, and -v.
+// If a run of go test has any test or non-test flags outside this set,
+// the result is not cached. To disable test caching, use any test flag
+// or argument other than the cacheable flags. The idiomatic way to disable
+// test caching explicitly is to use -count=1. Tests that open files within
+// the package's source root (usually $GOPATH) or that consult environment
+// variables only match future runs in which the files and environment
+// variables are unchanged. A cached test result is treated as executing
+// in no time at all, so a successful package test result will be cached and
+// reused regardless of -timeout setting.
//
// In addition to the build flags, the flags handled by 'go test' itself are:
//
@@ -2756,6 +2757,10 @@
// -failfast
// Do not start new tests after the first test failure.
//
+// -json
+// Log verbose output and test results in JSON. This presents the
+// same information as the -v flag in a machine-readable format.
+//
// -list regexp
// List tests, benchmarks, or examples matching the regular expression.
// No tests, benchmarks or examples will be run. This will only
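
The new -json paragraph above refers to the same event stream that cmd/test2json emits. A hedged consumer sketch (field names follow the cmd/test2json documentation; only a subset is decoded):

package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"io"
	"os"
)

// testEvent decodes a subset of the events printed by 'go test -json'
// (see cmd/test2json for the full set of fields and actions).
type testEvent struct {
	Action  string
	Package string
	Test    string
	Elapsed float64
	Output  string
}

// Usage (illustrative): go test -json ./... | go run summarize.go
func main() {
	dec := json.NewDecoder(bufio.NewReader(os.Stdin))
	for {
		var ev testEvent
		if err := dec.Decode(&ev); err == io.EOF {
			break
		} else if err != nil {
			fmt.Fprintln(os.Stderr, "decode:", err)
			os.Exit(1)
		}
		if ev.Action == "fail" && ev.Test != "" {
			fmt.Printf("FAIL %s.%s (%.2fs)\n", ev.Package, ev.Test, ev.Elapsed)
		}
	}
}
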
diff --git a/src/cmd/go/go_test.go b/src/cmd/go/go_test.go
index 6ce276537b..b13191f678 100644
--- a/src/cmd/go/go_test.go
+++ b/src/cmd/go/go_test.go
@@ -806,7 +806,9 @@ func TestNewReleaseRebuildsStalePackagesInGOPATH(t *testing.T) {
"src/internal/abi",
"src/internal/bytealg",
"src/internal/cpu",
+ "src/internal/goarch",
"src/internal/goexperiment",
+ "src/internal/goos",
"src/math/bits",
"src/unsafe",
filepath.Join("pkg", runtime.GOOS+"_"+runtime.GOARCH),
diff --git a/src/cmd/go/internal/modcmd/vendor.go b/src/cmd/go/internal/modcmd/vendor.go
index a51ac21751..1effcea1a0 100644
--- a/src/cmd/go/internal/modcmd/vendor.go
+++ b/src/cmd/go/internal/modcmd/vendor.go
@@ -243,7 +243,7 @@ func vendorPkg(vdir, pkg string) {
if err != nil {
if errors.As(err, &noGoError) {
return // No source files in this package are built. Skip embeds in ignored files.
- } else if !errors.As(err, &multiplePackageError) { // multiplePackgeErrors are okay, but others are not.
+ } else if !errors.As(err, &multiplePackageError) { // multiplePackageErrors are OK, but others are not.
base.Fatalf("internal error: failed to find embedded files of %s: %v\n", pkg, err)
}
}
diff --git a/src/cmd/go/internal/modfetch/codehost/git.go b/src/cmd/go/internal/modfetch/codehost/git.go
index 4d4964edf4..a782de56ff 100644
--- a/src/cmd/go/internal/modfetch/codehost/git.go
+++ b/src/cmd/go/internal/modfetch/codehost/git.go
@@ -170,59 +170,63 @@ func (r *gitRepo) loadLocalTags() {
}
// loadRefs loads heads and tags references from the remote into the map r.refs.
-// Should only be called as r.refsOnce.Do(r.loadRefs).
-func (r *gitRepo) loadRefs() {
- // The git protocol sends all known refs and ls-remote filters them on the client side,
- // so we might as well record both heads and tags in one shot.
- // Most of the time we only care about tags but sometimes we care about heads too.
- out, gitErr := Run(r.dir, "git", "ls-remote", "-q", r.remote)
- if gitErr != nil {
- if rerr, ok := gitErr.(*RunError); ok {
- if bytes.Contains(rerr.Stderr, []byte("fatal: could not read Username")) {
- rerr.HelpText = "Confirm the import path was entered correctly.\nIf this is a private repository, see https://golang.org/doc/faq#git_https for additional information."
+// The result is cached in memory.
+func (r *gitRepo) loadRefs() (map[string]string, error) {
+ r.refsOnce.Do(func() {
+ // The git protocol sends all known refs and ls-remote filters them on the client side,
+ // so we might as well record both heads and tags in one shot.
+ // Most of the time we only care about tags but sometimes we care about heads too.
+ out, gitErr := Run(r.dir, "git", "ls-remote", "-q", r.remote)
+ if gitErr != nil {
+ if rerr, ok := gitErr.(*RunError); ok {
+ if bytes.Contains(rerr.Stderr, []byte("fatal: could not read Username")) {
+ rerr.HelpText = "Confirm the import path was entered correctly.\nIf this is a private repository, see https://golang.org/doc/faq#git_https for additional information."
+ }
}
- }
- // If the remote URL doesn't exist at all, ideally we should treat the whole
- // repository as nonexistent by wrapping the error in a notExistError.
- // For HTTP and HTTPS, that's easy to detect: we'll try to fetch the URL
- // ourselves and see what code it serves.
- if u, err := url.Parse(r.remoteURL); err == nil && (u.Scheme == "http" || u.Scheme == "https") {
- if _, err := web.GetBytes(u); errors.Is(err, fs.ErrNotExist) {
- gitErr = notExistError{gitErr}
+ // If the remote URL doesn't exist at all, ideally we should treat the whole
+ // repository as nonexistent by wrapping the error in a notExistError.
+ // For HTTP and HTTPS, that's easy to detect: we'll try to fetch the URL
+ // ourselves and see what code it serves.
+ if u, err := url.Parse(r.remoteURL); err == nil && (u.Scheme == "http" || u.Scheme == "https") {
+ if _, err := web.GetBytes(u); errors.Is(err, fs.ErrNotExist) {
+ gitErr = notExistError{gitErr}
+ }
}
- }
- r.refsErr = gitErr
- return
- }
-
- r.refs = make(map[string]string)
- for _, line := range strings.Split(string(out), "\n") {
- f := strings.Fields(line)
- if len(f) != 2 {
- continue
+ r.refsErr = gitErr
+ return
}
- if f[1] == "HEAD" || strings.HasPrefix(f[1], "refs/heads/") || strings.HasPrefix(f[1], "refs/tags/") {
- r.refs[f[1]] = f[0]
+
+ refs := make(map[string]string)
+ for _, line := range strings.Split(string(out), "\n") {
+ f := strings.Fields(line)
+ if len(f) != 2 {
+ continue
+ }
+ if f[1] == "HEAD" || strings.HasPrefix(f[1], "refs/heads/") || strings.HasPrefix(f[1], "refs/tags/") {
+ refs[f[1]] = f[0]
+ }
}
- }
- for ref, hash := range r.refs {
- if strings.HasSuffix(ref, "^{}") { // record unwrapped annotated tag as value of tag
- r.refs[strings.TrimSuffix(ref, "^{}")] = hash
- delete(r.refs, ref)
+ for ref, hash := range refs {
+ if strings.HasSuffix(ref, "^{}") { // record unwrapped annotated tag as value of tag
+ refs[strings.TrimSuffix(ref, "^{}")] = hash
+ delete(refs, ref)
+ }
}
- }
+ r.refs = refs
+ })
+ return r.refs, r.refsErr
}
func (r *gitRepo) Tags(prefix string) ([]string, error) {
- r.refsOnce.Do(r.loadRefs)
- if r.refsErr != nil {
- return nil, r.refsErr
+ refs, err := r.loadRefs()
+ if err != nil {
+ return nil, err
}
tags := []string{}
- for ref := range r.refs {
+ for ref := range refs {
if !strings.HasPrefix(ref, "refs/tags/") {
continue
}
@@ -237,14 +241,14 @@ func (r *gitRepo) Tags(prefix string) ([]string, error) {
}
func (r *gitRepo) Latest() (*RevInfo, error) {
- r.refsOnce.Do(r.loadRefs)
- if r.refsErr != nil {
- return nil, r.refsErr
+ refs, err := r.loadRefs()
+ if err != nil {
+ return nil, err
}
- if r.refs["HEAD"] == "" {
+ if refs["HEAD"] == "" {
return nil, ErrNoCommits
}
- return r.Stat(r.refs["HEAD"])
+ return r.Stat(refs["HEAD"])
}
// findRef finds some ref name for the given hash,
@@ -252,8 +256,11 @@ func (r *gitRepo) Latest() (*RevInfo, error) {
// There may be multiple ref names for a given hash,
// in which case this returns some name - it doesn't matter which.
func (r *gitRepo) findRef(hash string) (ref string, ok bool) {
- r.refsOnce.Do(r.loadRefs)
- for ref, h := range r.refs {
+ refs, err := r.loadRefs()
+ if err != nil {
+ return "", false
+ }
+ for ref, h := range refs {
if h == hash {
return ref, true
}
@@ -295,29 +302,32 @@ func (r *gitRepo) stat(rev string) (*RevInfo, error) {
// Maybe rev is the name of a tag or branch on the remote server.
// Or maybe it's the prefix of a hash of a named ref.
// Try to resolve to both a ref (git name) and full (40-hex-digit) commit hash.
- r.refsOnce.Do(r.loadRefs)
+ refs, err := r.loadRefs()
+ if err != nil {
+ return nil, err
+ }
// loadRefs may return an error if git fails, for example segfaults, or
// could not load a private repo, but defer checking to the else block
// below, in case we already have the rev in question in the local cache.
var ref, hash string
- if r.refs["refs/tags/"+rev] != "" {
+ if refs["refs/tags/"+rev] != "" {
ref = "refs/tags/" + rev
- hash = r.refs[ref]
+ hash = refs[ref]
// Keep rev as is: tags are assumed not to change meaning.
- } else if r.refs["refs/heads/"+rev] != "" {
+ } else if refs["refs/heads/"+rev] != "" {
ref = "refs/heads/" + rev
- hash = r.refs[ref]
+ hash = refs[ref]
rev = hash // Replace rev, because meaning of refs/heads/foo can change.
- } else if rev == "HEAD" && r.refs["HEAD"] != "" {
+ } else if rev == "HEAD" && refs["HEAD"] != "" {
ref = "HEAD"
- hash = r.refs[ref]
+ hash = refs[ref]
rev = hash // Replace rev, because meaning of HEAD can change.
} else if len(rev) >= minHashDigits && len(rev) <= 40 && AllHex(rev) {
// At the least, we have a hash prefix we can look up after the fetch below.
// Maybe we can map it to a full hash using the known refs.
prefix := rev
// Check whether rev is prefix of known ref hash.
- for k, h := range r.refs {
+ for k, h := range refs {
if strings.HasPrefix(h, prefix) {
if hash != "" && hash != h {
// Hash is an ambiguous hash prefix.
@@ -335,9 +345,6 @@ func (r *gitRepo) stat(rev string) (*RevInfo, error) {
hash = rev
}
} else {
- if r.refsErr != nil {
- return nil, r.refsErr
- }
return nil, &UnknownRevisionError{Rev: rev}
}
@@ -535,12 +542,12 @@ func (r *gitRepo) ReadFileRevs(revs []string, file string, maxSize int64) (map[s
// Build list of known remote refs that might help.
var redo []string
- r.refsOnce.Do(r.loadRefs)
- if r.refsErr != nil {
- return nil, r.refsErr
+ refs, err := r.loadRefs()
+ if err != nil {
+ return nil, err
}
for _, tag := range need {
- if r.refs["refs/tags/"+tag] != "" {
+ if refs["refs/tags/"+tag] != "" {
redo = append(redo, tag)
}
}
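
The loadRefs change above is a standard once-guarded memoization: run ls-remote a single time, cache the map and the error on the receiver, and have every caller consume the returned pair instead of reading r.refs and r.refsErr directly. A self-contained sketch of that shape (generic names, not the gitRepo code):

package main

import (
	"fmt"
	"sync"
)

// cachedLookup loads a map exactly once and reports the cached result
// (and error) to every caller, mirroring the shape of the refactored
// gitRepo.loadRefs. Names here are illustrative.
type cachedLookup struct {
	once sync.Once
	refs map[string]string
	err  error
}

func (c *cachedLookup) load(fetch func() (map[string]string, error)) (map[string]string, error) {
	c.once.Do(func() {
		c.refs, c.err = fetch()
	})
	return c.refs, c.err
}

func main() {
	calls := 0
	c := &cachedLookup{}
	fetch := func() (map[string]string, error) {
		calls++
		return map[string]string{"refs/heads/main": "abc123"}, nil
	}
	for i := 0; i < 3; i++ {
		refs, err := c.load(fetch)
		fmt.Println(refs["refs/heads/main"], err)
	}
	fmt.Println("fetch ran", calls, "time(s)") // fetch ran 1 time(s)
}
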
diff --git a/src/cmd/go/internal/modload/buildlist.go b/src/cmd/go/internal/modload/buildlist.go
index 4fbe563cb8..9989bb5b2a 100644
--- a/src/cmd/go/internal/modload/buildlist.go
+++ b/src/cmd/go/internal/modload/buildlist.go
@@ -196,6 +196,19 @@ func (rs *Requirements) rootSelected(path string) (version string, ok bool) {
return "", false
}
+// hasRedundantRoot returns true if the root list contains multiple requirements
+// of the same module or a requirement on any version of the main module.
+// Redundant requirements should be pruned, but they may influence version
+// selection.
+func (rs *Requirements) hasRedundantRoot() bool {
+ for i, m := range rs.rootModules {
+ if MainModules.Contains(m.Path) || (i > 0 && m.Path == rs.rootModules[i-1].Path) {
+ return true
+ }
+ }
+ return false
+}
+
// Graph returns the graph of module requirements loaded from the current
// root modules (as reported by RootModules).
//
@@ -897,6 +910,12 @@ func updateLazyRoots(ctx context.Context, direct map[string]bool, rs *Requiremen
// and (trivially) version.
if !rootsUpgraded {
+ if cfg.BuildMod != "mod" {
+ // The only changes to the root set (if any) were to remove duplicates.
+ // The requirements are consistent (if perhaps redundant), so keep the
+ // original rs to preserve its ModuleGraph.
+ return rs, nil
+ }
// The root set has converged: every root going into this iteration was
 // already at its selected version, although we have removed other
// (redundant) roots for the same path.
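
hasRedundantRoot can compare each root only to its predecessor because requirementsFromModFiles sorts the roots by module path first, so duplicates are always adjacent. A small stand-alone sketch of the same check (illustrative names; MainModules.Contains is reduced to a plain string compare):

package main

import (
	"fmt"
	"sort"
)

// hasRedundant mirrors the shape of Requirements.hasRedundantRoot: with the
// paths sorted, a duplicate must sit next to its twin, and a requirement on
// the main module is redundant by definition. mainPath stands in for
// MainModules.Contains; everything here is illustrative.
func hasRedundant(rootPaths []string, mainPath string) bool {
	sort.Strings(rootPaths)
	for i, p := range rootPaths {
		if p == mainPath || (i > 0 && p == rootPaths[i-1]) {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(hasRedundant([]string{"example.com/a", "example.com/b"}, "m")) // false
	fmt.Println(hasRedundant([]string{"example.com/a", "example.com/a"}, "m")) // true: duplicate root
	fmt.Println(hasRedundant([]string{"example.com/a", "m"}, "m"))             // true: root on the main module
}
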
diff --git a/src/cmd/go/internal/modload/init.go b/src/cmd/go/internal/modload/init.go
index 1a91b83148..896c61d19d 100644
--- a/src/cmd/go/internal/modload/init.go
+++ b/src/cmd/go/internal/modload/init.go
@@ -674,6 +674,18 @@ func loadModFile(ctx context.Context) (rs *Requirements, needCommit bool) {
checkVendorConsistency(index, modFile)
rs.initVendor(vendorList)
}
+
+ if rs.hasRedundantRoot() {
+ // If any module path appears more than once in the roots, we know that the
+ // go.mod file needs to be updated even though we have not yet loaded any
+ // transitive dependencies.
+ var err error
+ rs, err = updateRoots(ctx, rs.direct, rs, nil, nil, false)
+ if err != nil {
+ base.Fatalf("go: %v", err)
+ }
+ }
+
if MainModules.Index(mainModule).goVersionV == "" {
// TODO(#45551): Do something more principled instead of checking
// cfg.CmdName directly here.
@@ -734,6 +746,13 @@ func CreateModFile(ctx context.Context, modPath string) {
}
}
base.Fatalf("go: %v", err)
+ } else if _, _, ok := module.SplitPathVersion(modPath); !ok {
+ if strings.HasPrefix(modPath, "gopkg.in/") {
+ invalidMajorVersionMsg := fmt.Errorf("module paths beginning with gopkg.in/ must always have a major version suffix in the form of .vN:\n\tgo mod init %s", suggestGopkgIn(modPath))
+ base.Fatalf(`go: invalid module path "%v": %v`, modPath, invalidMajorVersionMsg)
+ }
+ invalidMajorVersionMsg := fmt.Errorf("major version suffixes must be in the form of /vN and are only allowed for v2 or later:\n\tgo mod init %s", suggestModulePath(modPath))
+ base.Fatalf(`go: invalid module path "%v": %v`, modPath, invalidMajorVersionMsg)
}
fmt.Fprintf(os.Stderr, "go: creating new go.mod: module %s\n", modPath)
@@ -750,7 +769,12 @@ func CreateModFile(ctx context.Context, modPath string) {
base.Fatalf("go: %v", err)
}
- commitRequirements(ctx, modFileGoVersion(), requirementsFromModFiles(ctx, []*modfile.File{modFile}))
+ rs := requirementsFromModFiles(ctx, []*modfile.File{modFile})
+ rs, err = updateRoots(ctx, rs.direct, rs, nil, nil, false)
+ if err != nil {
+ base.Fatalf("go: %v", err)
+ }
+ commitRequirements(ctx, modFileGoVersion(), rs)
// Suggest running 'go mod tidy' unless the project is empty. Even if we
// imported all the correct requirements above, we're probably missing
@@ -930,7 +954,6 @@ func requirementsFromModFiles(ctx context.Context, modFiles []*modfile.File) *Re
}
roots = append(roots, r.Mod)
- mPathCount[r.Mod.Path]++
if !r.Indirect {
direct[r.Mod.Path] = true
}
@@ -938,21 +961,6 @@ func requirementsFromModFiles(ctx context.Context, modFiles []*modfile.File) *Re
}
module.Sort(roots)
rs := newRequirements(modDepthFromGoVersion(modFileGoVersion()), roots, direct)
-
- // If any module path appears more than once in the roots, we know that the
- // go.mod file needs to be updated even though we have not yet loaded any
- // transitive dependencies.
- for _, n := range mPathCount {
- if n > 1 {
- var err error
- rs, err = updateRoots(ctx, rs.direct, rs, nil, nil, false)
- if err != nil {
- base.Fatalf("go: %v", err)
- }
- break
- }
- }
-
return rs
}
@@ -1541,3 +1549,56 @@ const (
func modkey(m module.Version) module.Version {
return module.Version{Path: m.Path, Version: m.Version + "/go.mod"}
}
+
+func suggestModulePath(path string) string {
+ var m string
+
+ i := len(path)
+ for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9' || path[i-1] == '.') {
+ i--
+ }
+ url := path[:i]
+ url = strings.TrimSuffix(url, "/v")
+ url = strings.TrimSuffix(url, "/")
+
+ f := func(c rune) bool {
+ return c > '9' || c < '0'
+ }
+ s := strings.FieldsFunc(path[i:], f)
+ if len(s) > 0 {
+ m = s[0]
+ }
+ m = strings.TrimLeft(m, "0")
+ if m == "" || m == "1" {
+ return url + "/v2"
+ }
+
+ return url + "/v" + m
+}
+
+func suggestGopkgIn(path string) string {
+ var m string
+ i := len(path)
+ for i > 0 && (('0' <= path[i-1] && path[i-1] <= '9') || (path[i-1] == '.')) {
+ i--
+ }
+ url := path[:i]
+ url = strings.TrimSuffix(url, ".v")
+ url = strings.TrimSuffix(url, "/v")
+ url = strings.TrimSuffix(url, "/")
+
+ f := func(c rune) bool {
+ return c > '9' || c < '0'
+ }
+ s := strings.FieldsFunc(path, f)
+ if len(s) > 0 {
+ m = s[0]
+ }
+
+ m = strings.TrimLeft(m, "0")
+
+ if m == "" {
+ return url + ".v1"
+ }
+ return url + ".v" + m
+}
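
The two suggestion helpers exist only to put a plausible path into the 'go mod init' hint, so their contract is easiest to read as input and output pairs. The pairs below are lifted from the mod_init_invalid_major.txt script added later in this CL; the program just prints them (the helpers themselves are unexported):

package main

import "fmt"

// Input -> suggestion pairs for the unexported suggestModulePath and
// suggestGopkgIn helpers, copied from the mod_init_invalid_major.txt
// script added further down in this CL. The program only prints them.
func main() {
	suggestions := [][2]string{
		{"example.com/user/repo/v0", "example.com/user/repo/v2"},
		{"example.com/user/repo/v023", "example.com/user/repo/v23"},
		{"example.com/user/repo/v2.1.4", "example.com/user/repo/v2"},
		{"example.com/user/repo/v3.5", "example.com/user/repo/v3"},
		{"gopkg.in/pkg", "gopkg.in/pkg.v1"},
		{"gopkg.in/user/pkg/v0", "gopkg.in/user/pkg.v1"},
		{"gopkg.in/user/pkg.v01", "gopkg.in/user/pkg.v1"},
		{"gopkg.in/user/pkg/v2", "gopkg.in/user/pkg.v2"},
	}
	for _, s := range suggestions {
		fmt.Printf("invalid %-28s -> go mod init %s\n", s[0], s[1])
	}
}
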
diff --git a/src/cmd/go/internal/test/test.go b/src/cmd/go/internal/test/test.go
index 5e02a397a6..d65f54f2bd 100644
--- a/src/cmd/go/internal/test/test.go
+++ b/src/cmd/go/internal/test/test.go
@@ -79,7 +79,8 @@ binary. Only a high-confidence subset of the default go vet checks are
used. That subset is: 'atomic', 'bool', 'buildtags', 'errorsas',
'ifaceassert', 'nilfunc', 'printf', and 'stringintconv'. You can see
the documentation for these and other vet tests via "go doc cmd/vet".
-To disable the running of go vet, use the -vet=off flag.
+To disable the running of go vet, use the -vet=off flag. To run all
+checks, use the -vet=all flag.
All test output and summary lines are printed to the go command's
standard output, even if the test printed them to its own standard
@@ -120,16 +121,16 @@ elapsed time in the summary line.
The rule for a match in the cache is that the run involves the same
test binary and the flags on the command line come entirely from a
restricted set of 'cacheable' test flags, defined as -benchtime, -cpu,
--list, -parallel, -run, -short, and -v. If a run of go test has any test
-or non-test flags outside this set, the result is not cached. To
-disable test caching, use any test flag or argument other than the
-cacheable flags. The idiomatic way to disable test caching explicitly
-is to use -count=1. Tests that open files within the package's source
-root (usually $GOPATH) or that consult environment variables only
-match future runs in which the files and environment variables are unchanged.
-A cached test result is treated as executing in no time at all,
-so a successful package test result will be cached and reused
-regardless of -timeout setting.
+-list, -parallel, -run, -short, -timeout, -failfast, and -v.
+If a run of go test has any test or non-test flags outside this set,
+the result is not cached. To disable test caching, use any test flag
+or argument other than the cacheable flags. The idiomatic way to disable
+test caching explicitly is to use -count=1. Tests that open files within
+the package's source root (usually $GOPATH) or that consult environment
+variables only match future runs in which the files and environment
+variables are unchanged. A cached test result is treated as executing
+in no time at all, so a successful package test result will be cached and
+reused regardless of -timeout setting.
In addition to the build flags, the flags handled by 'go test' itself are:
@@ -243,6 +244,10 @@ control the execution of any test:
-failfast
Do not start new tests after the first test failure.
+ -json
+ Log verbose output and test results in JSON. This presents the
+ same information as the -v flag in a machine-readable format.
+
-list regexp
List tests, benchmarks, or examples matching the regular expression.
No tests, benchmarks or examples will be run. This will only
@@ -1349,6 +1354,7 @@ func (c *runCache) tryCacheWithID(b *work.Builder, a *work.Action, id string) bo
"-test.run",
"-test.short",
"-test.timeout",
+ "-test.failfast",
"-test.v":
// These are cacheable.
// Note that this list is documented above,
diff --git a/src/cmd/go/internal/test/testflag.go b/src/cmd/go/internal/test/testflag.go
index f129346d0d..97a9ef38b9 100644
--- a/src/cmd/go/internal/test/testflag.go
+++ b/src/cmd/go/internal/test/testflag.go
@@ -5,6 +5,10 @@
package test
import (
+ "cmd/go/internal/base"
+ "cmd/go/internal/cfg"
+ "cmd/go/internal/cmdflag"
+ "cmd/go/internal/work"
"errors"
"flag"
"fmt"
@@ -13,11 +17,6 @@ import (
"strconv"
"strings"
"time"
-
- "cmd/go/internal/base"
- "cmd/go/internal/cfg"
- "cmd/go/internal/cmdflag"
- "cmd/go/internal/work"
)
//go:generate go run ./genflags.go
@@ -135,6 +134,7 @@ type outputdirFlag struct {
func (f *outputdirFlag) String() string {
return f.abs
}
+
func (f *outputdirFlag) Set(value string) (err error) {
if value == "" {
f.abs = ""
@@ -143,6 +143,7 @@ func (f *outputdirFlag) Set(value string) (err error) {
}
return err
}
+
func (f *outputdirFlag) getAbs() string {
if f.abs == "" {
return base.Cwd()
@@ -151,8 +152,12 @@ func (f *outputdirFlag) getAbs() string {
}
// vetFlag implements the special parsing logic for the -vet flag:
-// a comma-separated list, with a distinguished value "off" and
-// a boolean tracking whether it was set explicitly.
+// a comma-separated list, with distinguished values "all" and
+// "off", plus a boolean tracking whether it was set explicitly.
+//
+// "all" is encoded as vetFlag{true, false, nil}, since it will
+// pass no flags to the vet binary, and by default, it runs all
+// analyzers.
type vetFlag struct {
explicit bool
off bool
@@ -160,7 +165,10 @@ type vetFlag struct {
}
func (f *vetFlag) String() string {
- if f.off {
+ switch {
+ case !f.off && !f.explicit && len(f.flags) == 0:
+ return "all"
+ case f.off:
return "off"
}
@@ -175,32 +183,38 @@ func (f *vetFlag) String() string {
}
func (f *vetFlag) Set(value string) error {
- if value == "" {
+ switch {
+ case value == "":
*f = vetFlag{flags: defaultVetFlags}
return nil
- }
-
- if value == "off" {
- *f = vetFlag{
- explicit: true,
- off: true,
- }
- return nil
- }
-
- if strings.Contains(value, "=") {
+ case strings.Contains(value, "="):
return fmt.Errorf("-vet argument cannot contain equal signs")
- }
- if strings.Contains(value, " ") {
+ case strings.Contains(value, " "):
return fmt.Errorf("-vet argument is comma-separated list, cannot contain spaces")
}
*f = vetFlag{explicit: true}
+ var single string
for _, arg := range strings.Split(value, ",") {
- if arg == "" {
+ switch arg {
+ case "":
return fmt.Errorf("-vet argument contains empty list element")
+ case "all":
+ single = arg
+ *f = vetFlag{explicit: true}
+ continue
+ case "off":
+ single = arg
+ *f = vetFlag{
+ explicit: true,
+ off: true,
+ }
+ continue
}
f.flags = append(f.flags, "-"+arg)
}
+ if len(f.flags) > 1 && single != "" {
+ return fmt.Errorf("-vet does not accept %q in a list with other analyzers", single)
+ }
return nil
}
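
Summarizing the reworked -vet parsing above as input and behavior pairs (a plain illustration, not the parser itself):

package main

import "fmt"

// Behavior of the -vet flag after this change, as exercised by
// (*vetFlag).Set above. Purely descriptive; no cmd/go code is called.
func main() {
	cases := []struct{ value, outcome string }{
		{"", "run the default high-confidence analyzer subset"},
		{"off", "do not run vet at all"},
		{"all", "run vet with no analyzer restriction (all checks)"},
		{"shift,printf", "run vet with only -shift and -printf"},
		{"shift printf", "error: list is comma-separated, spaces rejected"},
		{"printf=1", "error: equal signs rejected"},
	}
	for _, c := range cases {
		fmt.Printf("-vet=%q\t=> %s\n", c.value, c.outcome)
	}
}
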
diff --git a/src/cmd/go/internal/work/gc.go b/src/cmd/go/internal/work/gc.go
index 1fc825de47..eee8adca94 100644
--- a/src/cmd/go/internal/work/gc.go
+++ b/src/cmd/go/internal/work/gc.go
@@ -29,6 +29,18 @@ import (
// The 'path' used for GOROOT_FINAL when -trimpath is specified
const trimPathGoRootFinal = "go"
+var runtimePackages = map[string]struct{}{
+ "internal/abi": struct{}{},
+ "internal/bytealg": struct{}{},
+ "internal/cpu": struct{}{},
+ "internal/goarch": struct{}{},
+ "internal/goos": struct{}{},
+ "runtime": struct{}{},
+ "runtime/internal/atomic": struct{}{},
+ "runtime/internal/math": struct{}{},
+ "runtime/internal/sys": struct{}{},
+}
+
// The Go toolchain.
type gcToolchain struct{}
@@ -88,11 +100,8 @@ func (gcToolchain) gc(b *Builder, a *Action, archive string, importcfg, embedcfg
if p.Standard {
gcargs = append(gcargs, "-std")
}
- compilingRuntime := p.Standard && (p.ImportPath == "runtime" || strings.HasPrefix(p.ImportPath, "runtime/internal"))
- // The runtime package imports a couple of general internal packages.
- if p.Standard && (p.ImportPath == "internal/cpu" || p.ImportPath == "internal/bytealg" || p.ImportPath == "internal/abi") {
- compilingRuntime = true
- }
+ _, compilingRuntime := runtimePackages[p.ImportPath]
+ compilingRuntime = compilingRuntime && p.Standard
if compilingRuntime {
// runtime compiles with a special gc flag to check for
// memory allocations that are invalid in the runtime package,
@@ -365,6 +374,11 @@ func asmArgs(a *Action, p *load.Package) []interface{} {
args = append(args, "-compiling-runtime")
}
+ if cfg.Goarch == "386" {
+ // Define GO386_value from cfg.GO386.
+ args = append(args, "-D", "GO386_"+cfg.GO386)
+ }
+
if cfg.Goarch == "mips" || cfg.Goarch == "mipsle" {
// Define GOMIPS_value from cfg.GOMIPS.
args = append(args, "-D", "GOMIPS_"+cfg.GOMIPS)
diff --git a/src/cmd/go/internal/work/init.go b/src/cmd/go/internal/work/init.go
index 022137390f..7acee3dd55 100644
--- a/src/cmd/go/internal/work/init.go
+++ b/src/cmd/go/internal/work/init.go
@@ -40,6 +40,10 @@ func BuildInit() {
cfg.BuildPkgdir = p
}
+ if cfg.BuildP <= 0 {
+ base.Fatalf("go: -p must be a positive integer: %v\n", cfg.BuildP)
+ }
+
// Make sure CC, CXX, and FC are absolute paths.
for _, key := range []string{"CC", "CXX", "FC"} {
value := cfg.Getenv(key)
diff --git a/src/cmd/go/testdata/mod/example.com_split-incompatible_v2.0.0+incompatible.txt b/src/cmd/go/testdata/mod/example.com_split-incompatible_v2.0.0+incompatible.txt
index 35c3f27710..00076d74fc 100644
--- a/src/cmd/go/testdata/mod/example.com_split-incompatible_v2.0.0+incompatible.txt
+++ b/src/cmd/go/testdata/mod/example.com_split-incompatible_v2.0.0+incompatible.txt
@@ -1,6 +1,6 @@
Written by hand.
Test case for getting a package that has been moved to a nested module,
-with a +incompatible verison (and thus no go.mod file) at the root module.
+with a +incompatible version (and thus no go.mod file) at the root module.
-- .mod --
module example.com/split-incompatible
diff --git a/src/cmd/go/testdata/mod/example.com_split-incompatible_v2.1.0-pre+incompatible.txt b/src/cmd/go/testdata/mod/example.com_split-incompatible_v2.1.0-pre+incompatible.txt
index 917fc0f559..bb1c1fecc9 100644
--- a/src/cmd/go/testdata/mod/example.com_split-incompatible_v2.1.0-pre+incompatible.txt
+++ b/src/cmd/go/testdata/mod/example.com_split-incompatible_v2.1.0-pre+incompatible.txt
@@ -1,6 +1,6 @@
Written by hand.
Test case for getting a package that has been moved to a nested module,
-with a +incompatible verison (and thus no go.mod file) at the root module.
+with a +incompatible version (and thus no go.mod file) at the root module.
-- .mod --
module example.com/split-incompatible
diff --git a/src/cmd/go/testdata/script/build_negative_p.txt b/src/cmd/go/testdata/script/build_negative_p.txt
new file mode 100644
index 0000000000..9123907dc8
--- /dev/null
+++ b/src/cmd/go/testdata/script/build_negative_p.txt
@@ -0,0 +1,5 @@
+! go build -p=-1 example.go
+stderr 'go: -p must be a positive integer: -1'
+
+-- example.go --
+package example
\ No newline at end of file
diff --git a/src/cmd/go/testdata/script/mod_init_invalid_major.txt b/src/cmd/go/testdata/script/mod_init_invalid_major.txt
new file mode 100644
index 0000000000..ae93e70d63
--- /dev/null
+++ b/src/cmd/go/testdata/script/mod_init_invalid_major.txt
@@ -0,0 +1,82 @@
+env GO111MODULE=on
+env GOFLAGS=-mod=mod
+
+! go mod init example.com/user/repo/v0
+stderr '(?s)^go: invalid module path "example.com/user/repo/v0": major version suffixes must be in the form of /vN and are only allowed for v2 or later(.*)go mod init example.com/user/repo/v2$'
+
+! go mod init example.com/user/repo/v02
+stderr '(?s)^go: invalid module path "example.com/user/repo/v02": major version suffixes must be in the form of /vN and are only allowed for v2 or later(.*)go mod init example.com/user/repo/v2$'
+
+! go mod init example.com/user/repo/v023
+stderr '(?s)^go: invalid module path "example.com/user/repo/v023": major version suffixes must be in the form of /vN and are only allowed for v2 or later(.*)go mod init example.com/user/repo/v23$'
+
+! go mod init example.com/user/repo/v1
+stderr '(?s)^go: invalid module path "example.com/user/repo/v1": major version suffixes must be in the form of /vN and are only allowed for v2 or later(.*)go mod init example.com/user/repo/v2$'
+
+! go mod init example.com/user/repo/v2.0
+stderr '(?s)^go: invalid module path "example.com/user/repo/v2.0": major version suffixes must be in the form of /vN and are only allowed for v2 or later(.*)go mod init example.com/user/repo/v2$'
+
+! go mod init example.com/user/repo/v2.1.4
+stderr '(?s)^go: invalid module path "example.com/user/repo/v2.1.4": major version suffixes must be in the form of /vN and are only allowed for v2 or later(.*)go mod init example.com/user/repo/v2$'
+
+! go mod init example.com/user/repo/v3.5
+stderr '(?s)^go: invalid module path "example.com/user/repo/v3.5": major version suffixes must be in the form of /vN and are only allowed for v2 or later(.*)go mod init example.com/user/repo/v3$'
+
+! go mod init example.com/user/repo/v4.1.4
+stderr '(?s)^go: invalid module path "example.com/user/repo/v4.1.4": major version suffixes must be in the form of /vN and are only allowed for v2 or later(.*)go mod init example.com/user/repo/v4$'
+
+! go mod init example.com/user/repo/v.2.3
+stderr '(?s)^go: invalid module path "example.com/user/repo/v.2.3": major version suffixes must be in the form of /vN and are only allowed for v2 or later(.*)go mod init example.com/user/repo/v2$'
+
+! go mod init example.com/user/repo/v.5.3
+stderr '(?s)^go: invalid module path "example.com/user/repo/v.5.3": major version suffixes must be in the form of /vN and are only allowed for v2 or later(.*)go mod init example.com/user/repo/v5$'
+
+! go mod init gopkg.in/pkg
+stderr '(?s)^go: invalid module path "gopkg.in/pkg": module paths beginning with gopkg.in/ must always have a major version suffix in the form of .vN(.*)go mod init gopkg.in/pkg.v1$'
+
+! go mod init gopkg.in/user/pkg
+stderr '(?s)^go: invalid module path "gopkg.in/user/pkg": module paths beginning with gopkg.in/ must always have a major version suffix in the form of .vN(.*)go mod init gopkg.in/user/pkg.v1$'
+
+! go mod init gopkg.in/user/pkg/v0
+stderr '(?s)^go: invalid module path "gopkg.in/user/pkg/v0": module paths beginning with gopkg.in/ must always have a major version suffix in the form of .vN(.*)go mod init gopkg.in/user/pkg.v1$'
+
+! go mod init gopkg.in/user/pkg/v1
+stderr '(?s)^go: invalid module path "gopkg.in/user/pkg/v1": module paths beginning with gopkg.in/ must always have a major version suffix in the form of .vN(.*)go mod init gopkg.in/user/pkg.v1$'
+
+! go mod init gopkg.in/user/pkg/v2
+stderr '(?s)^go: invalid module path "gopkg.in/user/pkg/v2": module paths beginning with gopkg.in/ must always have a major version suffix in the form of .vN(.*)go mod init gopkg.in/user/pkg.v2$'
+
+! go mod init gopkg.in/user/pkg.v
+stderr '(?s)^go: invalid module path "gopkg.in/user/pkg.v": module paths beginning with gopkg.in/ must always have a major version suffix in the form of .vN(.*)go mod init gopkg.in/user/pkg.v1$'
+
+! go mod init gopkg.in/user/pkg.v0.1
+stderr '(?s)^go: invalid module path "gopkg.in/user/pkg.v0.1": module paths beginning with gopkg.in/ must always have a major version suffix in the form of .vN(.*)go mod init gopkg.in/user/pkg.v1$'
+
+! go mod init gopkg.in/user/pkg.v.1
+stderr '(?s)^go: invalid module path "gopkg.in/user/pkg.v.1": module paths beginning with gopkg.in/ must always have a major version suffix in the form of .vN(.*)go mod init gopkg.in/user/pkg.v1$'
+
+! go mod init gopkg.in/user/pkg.v01
+stderr '(?s)^go: invalid module path "gopkg.in/user/pkg.v01": module paths beginning with gopkg.in/ must always have a major version suffix in the form of .vN(.*)go mod init gopkg.in/user/pkg.v1$'
+
+! go mod init gopkg.in/user/pkg.v.2.3
+stderr '(?s)^go: invalid module path "gopkg.in/user/pkg.v.2.3": module paths beginning with gopkg.in/ must always have a major version suffix in the form of .vN(.*)go mod init gopkg.in/user/pkg.v2$'
+
+# module paths with a trailing dot are rejected as invalid import paths
+! go mod init example.com/user/repo/v2.
+stderr '(?s)^go: malformed module path "example.com/user/repo/v2.": trailing dot in path element$'
+
+! go mod init example.com/user/repo/v2..
+stderr '(?s)^go: malformed module path "example.com/user/repo/v2..": trailing dot in path element$'
+
+! go mod init gopkg.in/user/pkg.v.2.
+stderr '(?s)^go: malformed module path "gopkg.in/user/pkg.v.2.": trailing dot in path element$'
+
+! go mod init gopkg.in/user/pkg.v.2..
+stderr '(?s)^go: malformed module path "gopkg.in/user/pkg.v.2..": trailing dot in path element$'
+
+# module paths with spaces are also rejected
+! go mod init 'foo bar'
+stderr '(?s)^go: malformed module path "foo bar": invalid char '' ''$'
+
+! go mod init 'foo bar baz'
+stderr '(?s)^go: malformed module path "foo bar baz": invalid char '' ''$'
diff --git a/src/cmd/go/testdata/script/mod_lazy_import_allmod.txt b/src/cmd/go/testdata/script/mod_lazy_import_allmod.txt
index 97718c4513..60d4187b11 100644
--- a/src/cmd/go/testdata/script/mod_lazy_import_allmod.txt
+++ b/src/cmd/go/testdata/script/mod_lazy_import_allmod.txt
@@ -66,7 +66,7 @@ stdout '^b v0.1.0 '
! stdout '^c '
# After adding a new direct import of b/y,
-# the existing verison of b should be promoted to a root,
+# the existing version of b should be promoted to a root,
# bringing the version of c required by b into the build list.
cp m.go.new m.go
diff --git a/src/cmd/go/testdata/script/mod_tidy_compat.txt b/src/cmd/go/testdata/script/mod_tidy_compat.txt
index e6edef5ee3..29cae17881 100644
--- a/src/cmd/go/testdata/script/mod_tidy_compat.txt
+++ b/src/cmd/go/testdata/script/mod_tidy_compat.txt
@@ -20,7 +20,7 @@ env MODFMT='{{with .Module}}{{.Path}} {{.Version}}{{end}}'
# + ---- example.net/lazy v0.1.0 ---- example.com/version v1.0.1
#
# Go 1.17 avoids loading the go.mod file for example.com/version v1.0.1
-# (because it is lower than the verison explicitly required by m,
+# (because it is lower than the version explicitly required by m,
# and the module that requires it — m — specifies 'go 1.17').
#
# That go.mod file happens not to affect the final 1.16 module graph anyway,
diff --git a/src/cmd/go/testdata/script/mod_tidy_lazy_self.txt b/src/cmd/go/testdata/script/mod_tidy_lazy_self.txt
index ffcea18603..9abbabd2eb 100644
--- a/src/cmd/go/testdata/script/mod_tidy_lazy_self.txt
+++ b/src/cmd/go/testdata/script/mod_tidy_lazy_self.txt
@@ -2,18 +2,13 @@
# 'go mod tidy' should not panic if the main module initially
# requires an older version of itself.
+# A module may require an older version of itself without error. This is
+# inconsistent (the required version is never selected), but we still get
+# a reproducible build list.
+go list -m all
+stdout '^golang.org/issue/46078$'
-# A module that explicitly requires an older version of itself should be
-# rejected as inconsistent: we enforce that every explicit requirement is the
-# selected version of its module path, but the selected version of the main
-# module is always itself — not some explicit version.
-
-! go list -m all
-stderr '^go: updates to go\.mod needed; to update it:\n\tgo mod tidy$'
-
-
-# The suggested 'go mod tidy' command should succeed (not crash).
-
+# 'go mod tidy' should fix this (and not crash).
go mod tidy
diff --git a/src/cmd/go/testdata/script/mod_vendor_goversion.txt b/src/cmd/go/testdata/script/mod_vendor_goversion.txt
index aa4cb41171..7f1966c8e8 100644
--- a/src/cmd/go/testdata/script/mod_vendor_goversion.txt
+++ b/src/cmd/go/testdata/script/mod_vendor_goversion.txt
@@ -26,7 +26,7 @@ go mod vendor
! grep 1.17 vendor/modules.txt
! go build example.net/need117
-stderr '^vendor[/\\]example\.net[/\\]need117[/\\]need117.go:5:18: .*\n\tconversion of slices to array pointers only supported as of -lang=go1\.17'
+stderr '^vendor[/\\]example\.net[/\\]need117[/\\]need117.go:5:1[89]: .*conversion of slices to array pointers only supported as of -lang=go1\.17'
! grep 1.13 vendor/modules.txt
go build example.net/bad114
diff --git a/src/cmd/go/testdata/script/mod_vendor_redundant_requirement.txt b/src/cmd/go/testdata/script/mod_vendor_redundant_requirement.txt
new file mode 100644
index 0000000000..3f6f5c5276
--- /dev/null
+++ b/src/cmd/go/testdata/script/mod_vendor_redundant_requirement.txt
@@ -0,0 +1,29 @@
+# 'go list -mod=vendor' should succeed even when go.mod contains redundant
+# requirements. Verifies #47565.
+go list -mod=vendor
+
+-- go.mod --
+module m
+
+go 1.17
+
+require example.com/m v0.0.0
+require example.com/m v0.0.0
+
+replace example.com/m v0.0.0 => ./m
+-- m/go.mod --
+module example.com/m
+
+go 1.17
+-- m/m.go --
+package m
+-- use.go --
+package use
+
+import _ "example.com/m"
+-- vendor/example.com/m/m.go --
+package m
+-- vendor/modules.txt --
+# example.com/m v0.0.0 => ./m
+## explicit; go 1.17
+example.com/m
diff --git a/src/cmd/go/testdata/script/test_cache_inputs.txt b/src/cmd/go/testdata/script/test_cache_inputs.txt
index d694a30994..3705c700d1 100644
--- a/src/cmd/go/testdata/script/test_cache_inputs.txt
+++ b/src/cmd/go/testdata/script/test_cache_inputs.txt
@@ -108,6 +108,12 @@ go test testcache -run=Benchtime -bench=Benchtime -benchtime=1x
go test testcache -run=Benchtime -bench=Benchtime -benchtime=1x
! stdout '\(cached\)'
+# golang.org/issue/47355: that includes the `-failfast` argument.
+go test testcache -run=TestOSArgs -failfast
+! stdout '\(cached\)'
+go test testcache -run=TestOSArgs -failfast
+stdout '\(cached\)'
+
# Executables within GOROOT and GOPATH should affect caching,
# even if the test does not stat them explicitly.
diff --git a/src/cmd/go/testdata/script/test_vet.txt b/src/cmd/go/testdata/script/test_vet.txt
index 5af26b54f9..2e0ae1956a 100644
--- a/src/cmd/go/testdata/script/test_vet.txt
+++ b/src/cmd/go/testdata/script/test_vet.txt
@@ -16,6 +16,11 @@ go test -vet=off p1.go
! stderr '[\\/]vet.*-shift'
stdout '\[no test files\]'
+# ensure all runs non-default vet
+! go test -vet=all ./vetall/...
+stderr 'using resp before checking for errors'
+
+
# Test issue #22890
go test m/vetcycle
stdout 'm/vetcycle.*\[no test files\]'
@@ -51,6 +56,21 @@ import "fmt"
func F() {
fmt.Printf("%d") // oops
}
+-- vetall/p.go --
+package p
+
+import "net/http"
+
+func F() {
+ resp, err := http.Head("example.com")
+ defer resp.Body.Close()
+ if err != nil {
+ panic(err)
+ }
+ // (defer statement belongs here)
+}
+-- vetall/p_test.go --
+package p
-- vetcycle/p.go --
package p
diff --git a/src/cmd/go/testdata/script/work.txt b/src/cmd/go/testdata/script/work.txt
index 095d6ff174..529c1c0bfd 100644
--- a/src/cmd/go/testdata/script/work.txt
+++ b/src/cmd/go/testdata/script/work.txt
@@ -50,7 +50,7 @@ go build -n -workfile=off -o foo foo.go
go build -n -o foo foo.go
-- go.work.dup --
-go 1.17
+go 1.18
directory (
a
@@ -58,14 +58,14 @@ directory (
../src/a
)
-- go.work.want --
-go 1.17
+go 1.18
directory (
./a
./b
)
-- go.work.d --
-go 1.17
+go 1.18
directory (
a
diff --git a/src/cmd/go/testdata/script/work_edit.txt b/src/cmd/go/testdata/script/work_edit.txt
index 0717086ee7..0de4069865 100644
--- a/src/cmd/go/testdata/script/work_edit.txt
+++ b/src/cmd/go/testdata/script/work_edit.txt
@@ -34,11 +34,11 @@ go mod editwork -print -fmt -workfile unformatted
cmp stdout formatted
-- go.work.want_initial --
-go 1.17
+go 1.18
directory m
-- go.work.want_directory_n --
-go 1.17
+go 1.18
directory (
m
@@ -132,7 +132,7 @@ replace x.1 v1.4.0 => ../z
]
}
-- unformatted --
-go 1.17
+go 1.18
directory (
a
b
@@ -143,7 +143,7 @@ go 1.17
x.1 v1.4.0 => ../z
)
-- formatted --
-go 1.17
+go 1.18
directory (
a
diff --git a/src/cmd/gofmt/gofmt_test.go b/src/cmd/gofmt/gofmt_test.go
index f0d3f8780f..9ef7676214 100644
--- a/src/cmd/gofmt/gofmt_test.go
+++ b/src/cmd/gofmt/gofmt_test.go
@@ -54,8 +54,6 @@ func gofmtFlags(filename string, maxLines int) string {
return ""
}
-var typeParamsEnabled = false
-
func runTest(t *testing.T, in, out string) {
// process flags
*simplifyAST = false
@@ -78,11 +76,6 @@ func runTest(t *testing.T, in, out string) {
case "-stdin":
// fake flag - pretend input is from stdin
stdin = true
- case "-G":
- // fake flag - test is for generic code
- if !typeParamsEnabled {
- return
- }
default:
t.Errorf("unrecognized flag name: %s", name)
}
diff --git a/src/cmd/gofmt/testdata/typeparams.golden b/src/cmd/gofmt/testdata/typeparams.golden
index 35f08d1379..f71bd130db 100644
--- a/src/cmd/gofmt/testdata/typeparams.golden
+++ b/src/cmd/gofmt/testdata/typeparams.golden
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//gofmt -G
+//gofmt
package typeparams
diff --git a/src/cmd/gofmt/testdata/typeparams.input b/src/cmd/gofmt/testdata/typeparams.input
index 7f3212c8e4..5d4c53d9f7 100644
--- a/src/cmd/gofmt/testdata/typeparams.input
+++ b/src/cmd/gofmt/testdata/typeparams.input
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//gofmt -G
+//gofmt
package typeparams
diff --git a/src/cmd/internal/buildid/buildid_test.go b/src/cmd/internal/buildid/buildid_test.go
index e832f9987e..4895a49e11 100644
--- a/src/cmd/internal/buildid/buildid_test.go
+++ b/src/cmd/internal/buildid/buildid_test.go
@@ -177,3 +177,11 @@ func TestExcludedReader(t *testing.T) {
}
}
}
+
+func TestEmptyID(t *testing.T) {
+ r := strings.NewReader("aha!")
+ matches, hash, err := FindAndHash(r, "", 1000)
+ if matches != nil || hash != ([32]byte{}) || err == nil || !strings.Contains(err.Error(), "no id") {
+ t.Errorf("FindAndHash: want nil, [32]byte{}, no id specified, got %v, %v, %v", matches, hash, err)
+ }
+}
diff --git a/src/cmd/internal/buildid/rewrite.go b/src/cmd/internal/buildid/rewrite.go
index a7928959c4..8814950db0 100644
--- a/src/cmd/internal/buildid/rewrite.go
+++ b/src/cmd/internal/buildid/rewrite.go
@@ -22,6 +22,9 @@ func FindAndHash(r io.Reader, id string, bufSize int) (matches []int64, hash [32
if bufSize == 0 {
bufSize = 31 * 1024 // bufSize+little will likely fit in 32 kB
}
+ if len(id) == 0 {
+ return nil, [32]byte{}, fmt.Errorf("buildid.FindAndHash: no id specified")
+ }
if len(id) > bufSize {
return nil, [32]byte{}, fmt.Errorf("buildid.FindAndHash: buffer too small")
}
diff --git a/src/cmd/internal/dwarf/dwarf.go b/src/cmd/internal/dwarf/dwarf.go
index 54c4c4d56d..4e163db020 100644
--- a/src/cmd/internal/dwarf/dwarf.go
+++ b/src/cmd/internal/dwarf/dwarf.go
@@ -1266,7 +1266,7 @@ func PutAbstractFunc(ctxt Context, s *FnState) error {
// its corresponding 'abstract' DIE (containing location-independent
// attributes such as name, type, etc). Inlined subroutine DIEs can
// have other inlined subroutine DIEs as children.
-func putInlinedFunc(ctxt Context, s *FnState, callersym Sym, callIdx int) error {
+func putInlinedFunc(ctxt Context, s *FnState, callIdx int) error {
ic := s.InlCalls.Calls[callIdx]
callee := ic.AbsFunSym
@@ -1277,7 +1277,7 @@ func putInlinedFunc(ctxt Context, s *FnState, callersym Sym, callIdx int) error
Uleb128put(ctxt, s.Info, int64(abbrev))
if logDwarf {
- ctxt.Logf("putInlinedFunc(caller=%v,callee=%v,abbrev=%d)\n", callersym, callee, abbrev)
+ ctxt.Logf("putInlinedFunc(callee=%v,abbrev=%d)\n", callee, abbrev)
}
// Abstract origin.
@@ -1312,8 +1312,7 @@ func putInlinedFunc(ctxt Context, s *FnState, callersym Sym, callIdx int) error
// Children of this inline.
for _, sib := range inlChildren(callIdx, &s.InlCalls) {
- absfn := s.InlCalls.Calls[sib].AbsFunSym
- err := putInlinedFunc(ctxt, s, absfn, sib)
+ err := putInlinedFunc(ctxt, s, sib)
if err != nil {
return err
}
@@ -1354,8 +1353,7 @@ func PutConcreteFunc(ctxt Context, s *FnState) error {
// Inlined subroutines.
for _, sib := range inlChildren(-1, &s.InlCalls) {
- absfn := s.InlCalls.Calls[sib].AbsFunSym
- err := putInlinedFunc(ctxt, s, absfn, sib)
+ err := putInlinedFunc(ctxt, s, sib)
if err != nil {
return err
}
@@ -1402,8 +1400,7 @@ func PutDefaultFunc(ctxt Context, s *FnState) error {
// Inlined subroutines.
for _, sib := range inlChildren(-1, &s.InlCalls) {
- absfn := s.InlCalls.Calls[sib].AbsFunSym
- err := putInlinedFunc(ctxt, s, absfn, sib)
+ err := putInlinedFunc(ctxt, s, sib)
if err != nil {
return err
}
diff --git a/src/cmd/internal/goobj/builtinlist.go b/src/cmd/internal/goobj/builtinlist.go
index 9f248137da..608c0d7222 100644
--- a/src/cmd/internal/goobj/builtinlist.go
+++ b/src/cmd/internal/goobj/builtinlist.go
@@ -33,6 +33,7 @@ var builtins = [...]struct {
{"runtime.goPanicSlice3BU", 1},
{"runtime.goPanicSlice3C", 1},
{"runtime.goPanicSlice3CU", 1},
+ {"runtime.goPanicSliceConvert", 1},
{"runtime.printbool", 1},
{"runtime.printfloat", 1},
{"runtime.printint", 1},
@@ -129,6 +130,8 @@ var builtins = [...]struct {
{"runtime.makeslice64", 1},
{"runtime.makeslicecopy", 1},
{"runtime.growslice", 1},
+ {"runtime.unsafeslice", 1},
+ {"runtime.unsafeslice64", 1},
{"runtime.memmove", 1},
{"runtime.memclrNoHeapPointers", 1},
{"runtime.memclrHasPointers", 1},
@@ -203,7 +206,9 @@ var builtins = [...]struct {
{"runtime.newproc", 1},
{"runtime.panicoverflow", 1},
{"runtime.sigpanic", 1},
- {"runtime.gcWriteBarrier", 0},
+ {"runtime.gcWriteBarrier", 1},
+ {"runtime.duffzero", 1},
+ {"runtime.duffcopy", 1},
{"runtime.morestack", 0},
{"runtime.morestackc", 0},
{"runtime.morestack_noctxt", 0},
diff --git a/src/cmd/internal/goobj/mkbuiltin.go b/src/cmd/internal/goobj/mkbuiltin.go
index 18b969586c..c9995fcede 100644
--- a/src/cmd/internal/goobj/mkbuiltin.go
+++ b/src/cmd/internal/goobj/mkbuiltin.go
@@ -151,7 +151,9 @@ var fextras = [...]extra{
{"sigpanic", 1},
// compiler backend inserted calls
- {"gcWriteBarrier", 0}, // asm function, ABI0
+ {"gcWriteBarrier", 1},
+ {"duffzero", 1},
+ {"duffcopy", 1},
// assembler backend inserted calls
{"morestack", 0}, // asm function, ABI0
diff --git a/src/cmd/internal/obj/arm/asm5.go b/src/cmd/internal/obj/arm/asm5.go
index ccf5f9e7f8..7b1682776e 100644
--- a/src/cmd/internal/obj/arm/asm5.go
+++ b/src/cmd/internal/obj/arm/asm5.go
@@ -355,11 +355,10 @@ var oprange [ALAST & obj.AMask][]Optab
var xcmp [C_GOK + 1][C_GOK + 1]bool
var (
- deferreturn *obj.LSym
- symdiv *obj.LSym
- symdivu *obj.LSym
- symmod *obj.LSym
- symmodu *obj.LSym
+ symdiv *obj.LSym
+ symdivu *obj.LSym
+ symmod *obj.LSym
+ symmodu *obj.LSym
)
// Note about encoding: Prog.scond holds the condition encoding,
@@ -1219,8 +1218,6 @@ func buildop(ctxt *obj.Link) {
return
}
- deferreturn = ctxt.LookupABI("runtime.deferreturn", obj.ABIInternal)
-
symdiv = ctxt.Lookup("runtime._div")
symdivu = ctxt.Lookup("runtime._divu")
symmod = ctxt.Lookup("runtime._mod")
diff --git a/src/cmd/internal/obj/arm64/asm7.go b/src/cmd/internal/obj/arm64/asm7.go
index d99afa3d27..8db25cf967 100644
--- a/src/cmd/internal/obj/arm64/asm7.go
+++ b/src/cmd/internal/obj/arm64/asm7.go
@@ -361,12 +361,12 @@ var optab = []Optab{
{AANDS, C_REG, C_NONE, C_NONE, C_REG, 1, 4, 0, 0, 0},
{ATST, C_REG, C_REG, C_NONE, C_NONE, 1, 4, 0, 0, 0},
{AAND, C_MBCON, C_REG, C_NONE, C_RSP, 53, 4, 0, 0, 0},
- {AAND, C_MBCON, C_NONE, C_NONE, C_REG, 53, 4, 0, 0, 0},
+ {AAND, C_MBCON, C_NONE, C_NONE, C_RSP, 53, 4, 0, 0, 0},
{AANDS, C_MBCON, C_REG, C_NONE, C_REG, 53, 4, 0, 0, 0},
{AANDS, C_MBCON, C_NONE, C_NONE, C_REG, 53, 4, 0, 0, 0},
{ATST, C_MBCON, C_REG, C_NONE, C_NONE, 53, 4, 0, 0, 0},
{AAND, C_BITCON, C_REG, C_NONE, C_RSP, 53, 4, 0, 0, 0},
- {AAND, C_BITCON, C_NONE, C_NONE, C_REG, 53, 4, 0, 0, 0},
+ {AAND, C_BITCON, C_NONE, C_NONE, C_RSP, 53, 4, 0, 0, 0},
{AANDS, C_BITCON, C_REG, C_NONE, C_REG, 53, 4, 0, 0, 0},
{AANDS, C_BITCON, C_NONE, C_NONE, C_REG, 53, 4, 0, 0, 0},
{ATST, C_BITCON, C_REG, C_NONE, C_NONE, 53, 4, 0, 0, 0},
@@ -404,6 +404,8 @@ var optab = []Optab{
/* TODO: MVN C_SHIFT */
/* MOVs that become MOVK/MOVN/MOVZ/ADD/SUB/OR */
+ {AMOVW, C_MBCON, C_NONE, C_NONE, C_REG, 32, 4, 0, 0, 0},
+ {AMOVD, C_MBCON, C_NONE, C_NONE, C_REG, 32, 4, 0, 0, 0},
{AMOVW, C_MOVCON, C_NONE, C_NONE, C_REG, 32, 4, 0, 0, 0},
{AMOVD, C_MOVCON, C_NONE, C_NONE, C_REG, 32, 4, 0, 0, 0},
{AMOVW, C_BITCON, C_NONE, C_NONE, C_RSP, 32, 4, 0, 0, 0},
@@ -415,7 +417,7 @@ var optab = []Optab{
{AMOVK, C_VCON, C_NONE, C_NONE, C_REG, 33, 4, 0, 0, 0},
{AMOVD, C_AACON, C_NONE, C_NONE, C_RSP, 4, 4, REGFROM, 0, 0},
- {AMOVD, C_AACON2, C_NONE, C_NONE, C_RSP, 4, 8, REGFROM, 0, 0},
+ {AMOVD, C_AACON2, C_NONE, C_NONE, C_RSP, 4, 8, REGFROM, NOTUSETMP, 0},
/* load long effective stack address (load int32 offset and add) */
{AMOVD, C_LACON, C_NONE, C_NONE, C_RSP, 34, 8, REGSP, LFROM, 0},
@@ -692,13 +694,12 @@ var optab = []Optab{
{AFMOVD, C_FREG, C_NONE, C_NONE, C_ROFF, 99, 4, 0, 0, 0},
/* pre/post-indexed/signed-offset load/store register pair
- (unscaled, signed 10-bit quad-aligned and long offset) */
+ (unscaled, signed 10-bit quad-aligned and long offset).
+ The pre/post-indexed format only supports OREG cases because
+ the RSP and pseudo registers are not allowed to be modified
+ in this way. */
{AFLDPQ, C_NQAUTO_16, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, 0},
- {AFLDPQ, C_NQAUTO_16, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, C_XPRE},
- {AFLDPQ, C_NQAUTO_16, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, C_XPOST},
{AFLDPQ, C_PQAUTO_16, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, 0},
- {AFLDPQ, C_PQAUTO_16, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, C_XPRE},
- {AFLDPQ, C_PQAUTO_16, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, C_XPOST},
{AFLDPQ, C_UAUTO4K, C_NONE, C_NONE, C_PAIR, 74, 8, REGSP, 0, 0},
{AFLDPQ, C_NAUTO4K, C_NONE, C_NONE, C_PAIR, 74, 8, REGSP, 0, 0},
{AFLDPQ, C_LAUTO, C_NONE, C_NONE, C_PAIR, 75, 12, REGSP, LFROM, 0},
@@ -714,11 +715,7 @@ var optab = []Optab{
{AFLDPQ, C_ADDR, C_NONE, C_NONE, C_PAIR, 88, 12, 0, 0, 0},
{AFSTPQ, C_PAIR, C_NONE, C_NONE, C_NQAUTO_16, 67, 4, REGSP, 0, 0},
- {AFSTPQ, C_PAIR, C_NONE, C_NONE, C_NQAUTO_16, 67, 4, REGSP, 0, C_XPRE},
- {AFSTPQ, C_PAIR, C_NONE, C_NONE, C_NQAUTO_16, 67, 4, REGSP, 0, C_XPOST},
{AFSTPQ, C_PAIR, C_NONE, C_NONE, C_PQAUTO_16, 67, 4, REGSP, 0, 0},
- {AFSTPQ, C_PAIR, C_NONE, C_NONE, C_PQAUTO_16, 67, 4, REGSP, 0, C_XPRE},
- {AFSTPQ, C_PAIR, C_NONE, C_NONE, C_PQAUTO_16, 67, 4, REGSP, 0, C_XPOST},
{AFSTPQ, C_PAIR, C_NONE, C_NONE, C_UAUTO4K, 76, 8, REGSP, 0, 0},
{AFSTPQ, C_PAIR, C_NONE, C_NONE, C_NAUTO4K, 76, 8, REGSP, 0, 0},
{AFSTPQ, C_PAIR, C_NONE, C_NONE, C_LAUTO, 77, 12, REGSP, LTO, 0},
@@ -734,11 +731,7 @@ var optab = []Optab{
{AFSTPQ, C_PAIR, C_NONE, C_NONE, C_ADDR, 87, 12, 0, 0, 0},
{ALDP, C_NPAUTO, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, 0},
- {ALDP, C_NPAUTO, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, C_XPRE},
- {ALDP, C_NPAUTO, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, C_XPOST},
{ALDP, C_PPAUTO, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, 0},
- {ALDP, C_PPAUTO, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, C_XPRE},
- {ALDP, C_PPAUTO, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, C_XPOST},
{ALDP, C_UAUTO4K, C_NONE, C_NONE, C_PAIR, 74, 8, REGSP, 0, 0},
{ALDP, C_NAUTO4K, C_NONE, C_NONE, C_PAIR, 74, 8, REGSP, 0, 0},
{ALDP, C_LAUTO, C_NONE, C_NONE, C_PAIR, 75, 12, REGSP, LFROM, 0},
@@ -754,11 +747,7 @@ var optab = []Optab{
{ALDP, C_ADDR, C_NONE, C_NONE, C_PAIR, 88, 12, 0, 0, 0},
{ASTP, C_PAIR, C_NONE, C_NONE, C_NPAUTO, 67, 4, REGSP, 0, 0},
- {ASTP, C_PAIR, C_NONE, C_NONE, C_NPAUTO, 67, 4, REGSP, 0, C_XPRE},
- {ASTP, C_PAIR, C_NONE, C_NONE, C_NPAUTO, 67, 4, REGSP, 0, C_XPOST},
{ASTP, C_PAIR, C_NONE, C_NONE, C_PPAUTO, 67, 4, REGSP, 0, 0},
- {ASTP, C_PAIR, C_NONE, C_NONE, C_PPAUTO, 67, 4, REGSP, 0, C_XPRE},
- {ASTP, C_PAIR, C_NONE, C_NONE, C_PPAUTO, 67, 4, REGSP, 0, C_XPOST},
{ASTP, C_PAIR, C_NONE, C_NONE, C_UAUTO4K, 76, 8, REGSP, 0, 0},
{ASTP, C_PAIR, C_NONE, C_NONE, C_NAUTO4K, 76, 8, REGSP, 0, 0},
{ASTP, C_PAIR, C_NONE, C_NONE, C_LAUTO, 77, 12, REGSP, LTO, 0},
@@ -775,11 +764,7 @@ var optab = []Optab{
// differ from LDP/STP for C_NSAUTO_4/C_PSAUTO_4/C_NSOREG_4/C_PSOREG_4
{ALDPW, C_NSAUTO_4, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, 0},
- {ALDPW, C_NSAUTO_4, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, C_XPRE},
- {ALDPW, C_NSAUTO_4, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, C_XPOST},
{ALDPW, C_PSAUTO_4, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, 0},
- {ALDPW, C_PSAUTO_4, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, C_XPRE},
- {ALDPW, C_PSAUTO_4, C_NONE, C_NONE, C_PAIR, 66, 4, REGSP, 0, C_XPOST},
{ALDPW, C_UAUTO4K, C_NONE, C_NONE, C_PAIR, 74, 8, REGSP, 0, 0},
{ALDPW, C_NAUTO4K, C_NONE, C_NONE, C_PAIR, 74, 8, REGSP, 0, 0},
{ALDPW, C_LAUTO, C_NONE, C_NONE, C_PAIR, 75, 12, REGSP, LFROM, 0},
@@ -795,11 +780,7 @@ var optab = []Optab{
{ALDPW, C_ADDR, C_NONE, C_NONE, C_PAIR, 88, 12, 0, 0, 0},
{ASTPW, C_PAIR, C_NONE, C_NONE, C_NSAUTO_4, 67, 4, REGSP, 0, 0},
- {ASTPW, C_PAIR, C_NONE, C_NONE, C_NSAUTO_4, 67, 4, REGSP, 0, C_XPRE},
- {ASTPW, C_PAIR, C_NONE, C_NONE, C_NSAUTO_4, 67, 4, REGSP, 0, C_XPOST},
{ASTPW, C_PAIR, C_NONE, C_NONE, C_PSAUTO_4, 67, 4, REGSP, 0, 0},
- {ASTPW, C_PAIR, C_NONE, C_NONE, C_PSAUTO_4, 67, 4, REGSP, 0, C_XPRE},
- {ASTPW, C_PAIR, C_NONE, C_NONE, C_PSAUTO_4, 67, 4, REGSP, 0, C_XPOST},
{ASTPW, C_PAIR, C_NONE, C_NONE, C_UAUTO4K, 76, 8, REGSP, 0, 0},
{ASTPW, C_PAIR, C_NONE, C_NONE, C_NAUTO4K, 76, 8, REGSP, 0, 0},
{ASTPW, C_PAIR, C_NONE, C_NONE, C_LAUTO, 77, 12, REGSP, LTO, 0},
@@ -2089,13 +2070,18 @@ func cmp(a int, b int) bool {
return true
}
+ case C_MBCON:
+ if b == C_ABCON0 {
+ return true
+ }
+
case C_BITCON:
if b == C_ABCON0 || b == C_ABCON || b == C_MBCON {
return true
}
case C_MOVCON:
- if b == C_MBCON || b == C_ZCON || b == C_ADDCON0 || b == C_AMCON {
+ if b == C_MBCON || b == C_ZCON || b == C_ADDCON0 || b == C_ABCON0 || b == C_AMCON {
return true
}
@@ -3299,8 +3285,10 @@ func (c *ctxt7) asmout(p *obj.Prog, o *Optab, out []uint32) {
}
if int(o.size) == 8 {
- o1 = c.oaddi(p, op, v&0xfff000, r, REGTMP)
- o2 = c.oaddi(p, op, v&0x000fff, REGTMP, rt)
+ // NOTE: this case does not use REGTMP. If it ever does,
+ // remove the NOTUSETMP flag in optab.
+ o1 = c.oaddi(p, op, v&0xfff000, r, rt)
+ o2 = c.oaddi(p, op, v&0x000fff, rt, rt)
break
}
@@ -4198,6 +4186,10 @@ func (c *ctxt7) asmout(p *obj.Prog, o *Optab, out []uint32) {
if r == 0 {
r = rt
}
+ if r == REG_RSP {
+ c.ctxt.Diag("illegal source register: %v", p)
+ break
+ }
mode := 64
v := uint64(p.From.Offset)
switch p.As {
@@ -7039,8 +7031,8 @@ func (c *ctxt7) omovlit(as obj.As, p *obj.Prog, a *obj.Addr, dr int) uint32 {
// load a constant (MOVCON or BITCON) in a into rt
func (c *ctxt7) omovconst(as obj.As, p *obj.Prog, a *obj.Addr, rt int) (o1 uint32) {
- if cls := oclass(a); cls == C_BITCON || cls == C_ABCON || cls == C_ABCON0 {
- // or $bitcon, REGZERO, rt
+ if cls := oclass(a); (cls == C_BITCON || cls == C_ABCON || cls == C_ABCON0) && rt != REGZERO {
+ // or $bitcon, REGZERO, rt. rt can't be ZR.
mode := 64
var as1 obj.As
switch as {
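
The NOTE added to the C_AACON2 case above explains the NOTUSETMP flag that the corresponding optab entry gains: the large ADD is now split into two add-immediates written straight into the destination register, with no trip through REGTMP. A standalone sketch of the arithmetic only (plain Go, illustrative names, not assembler code):

    package main

    import "fmt"

    // splitAdd mirrors the two oaddi instructions: first the high 12 bits of
    // the 24-bit offset, then the low 12 bits, accumulating in the destination.
    func splitAdd(base, v uint64) uint64 {
        hi := v & 0xfff000 // o1 = oaddi(op, v&0xfff000, r, rt)
        lo := v & 0x000fff // o2 = oaddi(op, v&0x000fff, rt, rt)
        rt := base + hi
        rt += lo
        return rt
    }

    func main() {
        base, v := uint64(0x10000), uint64(0xabcde)
        fmt.Println(splitAdd(base, v) == base+v) // true for any v < 1<<24
    }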
diff --git a/src/cmd/internal/obj/arm64/obj7.go b/src/cmd/internal/obj/arm64/obj7.go
index e41fb3bb75..a043d0972c 100644
--- a/src/cmd/internal/obj/arm64/obj7.go
+++ b/src/cmd/internal/obj/arm64/obj7.go
@@ -52,7 +52,7 @@ var complements = []obj.As{
}
func (c *ctxt7) stacksplit(p *obj.Prog, framesize int32) *obj.Prog {
- // MOV g_stackguard(g), R1
+ // MOV g_stackguard(g), RT1
p = obj.Appendp(p, c.newprog)
p.As = AMOVD
@@ -63,7 +63,7 @@ func (c *ctxt7) stacksplit(p *obj.Prog, framesize int32) *obj.Prog {
p.From.Offset = 3 * int64(c.ctxt.Arch.PtrSize) // G.stackguard1
}
p.To.Type = obj.TYPE_REG
- p.To.Reg = REG_R1
+ p.To.Reg = REGRT1
// Mark the stack bound check and morestack call async nonpreemptible.
// If we get preempted here, when resumed the preemption request is
@@ -74,25 +74,25 @@ func (c *ctxt7) stacksplit(p *obj.Prog, framesize int32) *obj.Prog {
q := (*obj.Prog)(nil)
if framesize <= objabi.StackSmall {
// small stack: SP < stackguard
- // MOV SP, R2
- // CMP stackguard, R2
+ // MOV SP, RT2
+ // CMP stackguard, RT2
p = obj.Appendp(p, c.newprog)
p.As = AMOVD
p.From.Type = obj.TYPE_REG
p.From.Reg = REGSP
p.To.Type = obj.TYPE_REG
- p.To.Reg = REG_R2
+ p.To.Reg = REGRT2
p = obj.Appendp(p, c.newprog)
p.As = ACMP
p.From.Type = obj.TYPE_REG
- p.From.Reg = REG_R1
- p.Reg = REG_R2
+ p.From.Reg = REGRT1
+ p.Reg = REGRT2
} else if framesize <= objabi.StackBig {
// large stack: SP-framesize < stackguard-StackSmall
- // SUB $(framesize-StackSmall), SP, R2
- // CMP stackguard, R2
+ // SUB $(framesize-StackSmall), SP, RT2
+ // CMP stackguard, RT2
p = obj.Appendp(p, c.newprog)
p.As = ASUB
@@ -100,13 +100,13 @@ func (c *ctxt7) stacksplit(p *obj.Prog, framesize int32) *obj.Prog {
p.From.Offset = int64(framesize) - objabi.StackSmall
p.Reg = REGSP
p.To.Type = obj.TYPE_REG
- p.To.Reg = REG_R2
+ p.To.Reg = REGRT2
p = obj.Appendp(p, c.newprog)
p.As = ACMP
p.From.Type = obj.TYPE_REG
- p.From.Reg = REG_R1
- p.Reg = REG_R2
+ p.From.Reg = REGRT1
+ p.Reg = REGRT2
} else {
// Such a large stack we need to protect against underflow.
// The runtime guarantees SP > objabi.StackBig, but
@@ -115,10 +115,10 @@ func (c *ctxt7) stacksplit(p *obj.Prog, framesize int32) *obj.Prog {
// stack guard to incorrectly succeed. We explicitly
// guard against underflow.
//
- // SUBS $(framesize-StackSmall), SP, R2
+ // SUBS $(framesize-StackSmall), SP, RT2
// // On underflow, jump to morestack
// BLO label_of_call_to_morestack
- // CMP stackguard, R2
+ // CMP stackguard, RT2
p = obj.Appendp(p, c.newprog)
p.As = ASUBS
@@ -126,7 +126,7 @@ func (c *ctxt7) stacksplit(p *obj.Prog, framesize int32) *obj.Prog {
p.From.Offset = int64(framesize) - objabi.StackSmall
p.Reg = REGSP
p.To.Type = obj.TYPE_REG
- p.To.Reg = REG_R2
+ p.To.Reg = REGRT2
p = obj.Appendp(p, c.newprog)
q = p
@@ -136,8 +136,8 @@ func (c *ctxt7) stacksplit(p *obj.Prog, framesize int32) *obj.Prog {
p = obj.Appendp(p, c.newprog)
p.As = ACMP
p.From.Type = obj.TYPE_REG
- p.From.Reg = REG_R1
- p.Reg = REG_R2
+ p.From.Reg = REGRT1
+ p.Reg = REGRT2
}
// BLS do-morestack
@@ -161,17 +161,20 @@ func (c *ctxt7) stacksplit(p *obj.Prog, framesize int32) *obj.Prog {
pcdata := c.ctxt.EmitEntryStackMap(c.cursym, spfix, c.newprog)
pcdata = c.ctxt.StartUnsafePoint(pcdata, c.newprog)
+ if q != nil {
+ q.To.SetTarget(pcdata)
+ }
+ bls.To.SetTarget(pcdata)
+
+ spill := c.cursym.Func().SpillRegisterArgs(pcdata, c.newprog)
+
// MOV LR, R3
- movlr := obj.Appendp(pcdata, c.newprog)
+ movlr := obj.Appendp(spill, c.newprog)
movlr.As = AMOVD
movlr.From.Type = obj.TYPE_REG
movlr.From.Reg = REGLINK
movlr.To.Type = obj.TYPE_REG
movlr.To.Reg = REG_R3
- if q != nil {
- q.To.SetTarget(movlr)
- }
- bls.To.SetTarget(movlr)
debug := movlr
if false {
@@ -196,7 +199,8 @@ func (c *ctxt7) stacksplit(p *obj.Prog, framesize int32) *obj.Prog {
}
call.To.Sym = c.ctxt.Lookup(morestack)
- pcdata = c.ctxt.EndUnsafePoint(call, c.newprog, -1)
+ unspill := c.cursym.Func().UnspillRegisterArgs(call, c.newprog)
+ pcdata = c.ctxt.EndUnsafePoint(unspill, c.newprog, -1)
// B start
jmp := obj.Appendp(pcdata, c.newprog)
@@ -301,7 +305,9 @@ func progedit(ctxt *obj.Link, p *obj.Prog, newprog obj.ProgAlloc) {
// for both 32-bit and 64-bit. 32-bit ops will
// zero the high 32-bit of the destination register
// anyway.
- if (isANDWop(p.As) || isADDWop(p.As) || p.As == AMOVW) && p.From.Type == obj.TYPE_CONST {
+ // For MOVW, the destination register can't be ZR,
+ // so don't bother rewriting it in this situation.
+ if (isANDWop(p.As) || isADDWop(p.As) || p.As == AMOVW && p.To.Reg != REGZERO) && p.From.Type == obj.TYPE_CONST {
v := p.From.Offset & 0xffffffff
p.From.Offset = v | v<<32
}
@@ -321,9 +327,9 @@ func (c *ctxt7) rewriteToUseGot(p *obj.Prog) {
// CALL REGTMP
var sym *obj.LSym
if p.As == obj.ADUFFZERO {
- sym = c.ctxt.Lookup("runtime.duffzero")
+ sym = c.ctxt.LookupABI("runtime.duffzero", obj.ABIInternal)
} else {
- sym = c.ctxt.Lookup("runtime.duffcopy")
+ sym = c.ctxt.LookupABI("runtime.duffcopy", obj.ABIInternal)
}
offset := p.To.Offset
p.As = AMOVD
@@ -631,38 +637,38 @@ func preprocess(ctxt *obj.Link, cursym *obj.LSym, newprog obj.ProgAlloc) {
if c.cursym.Func().Text.From.Sym.Wrapper() {
// if(g->panic != nil && g->panic->argp == FP) g->panic->argp = bottom-of-frame
//
- // MOV g_panic(g), R1
+ // MOV g_panic(g), RT1
// CBNZ checkargp
// end:
// NOP
// ... function body ...
// checkargp:
- // MOV panic_argp(R1), R2
- // ADD $(autosize+8), RSP, R3
- // CMP R2, R3
+ // MOV panic_argp(RT1), RT2
+ // ADD $(autosize+8), RSP, R20
+ // CMP RT2, R20
// BNE end
- // ADD $8, RSP, R4
- // MOVD R4, panic_argp(R1)
+ // ADD $8, RSP, R20
+ // MOVD R20, panic_argp(RT1)
// B end
//
// The NOP is needed to give the jumps somewhere to land.
// It is a liblink NOP, not an ARM64 NOP: it encodes to 0 instruction bytes.
q = q1
- // MOV g_panic(g), R1
+ // MOV g_panic(g), RT1
q = obj.Appendp(q, c.newprog)
q.As = AMOVD
q.From.Type = obj.TYPE_MEM
q.From.Reg = REGG
q.From.Offset = 4 * int64(c.ctxt.Arch.PtrSize) // G.panic
q.To.Type = obj.TYPE_REG
- q.To.Reg = REG_R1
+ q.To.Reg = REGRT1
- // CBNZ R1, checkargp
+ // CBNZ RT1, checkargp
cbnz := obj.Appendp(q, c.newprog)
cbnz.As = ACBNZ
cbnz.From.Type = obj.TYPE_REG
- cbnz.From.Reg = REG_R1
+ cbnz.From.Reg = REGRT1
cbnz.To.Type = obj.TYPE_BRANCH
// Empty branch target at the top of the function body
@@ -674,33 +680,33 @@ func preprocess(ctxt *obj.Link, cursym *obj.LSym, newprog obj.ProgAlloc) {
for last = end; last.Link != nil; last = last.Link {
}
- // MOV panic_argp(R1), R2
+ // MOV panic_argp(RT1), RT2
mov := obj.Appendp(last, c.newprog)
mov.As = AMOVD
mov.From.Type = obj.TYPE_MEM
- mov.From.Reg = REG_R1
+ mov.From.Reg = REGRT1
mov.From.Offset = 0 // Panic.argp
mov.To.Type = obj.TYPE_REG
- mov.To.Reg = REG_R2
+ mov.To.Reg = REGRT2
// CBNZ branches to the MOV above
cbnz.To.SetTarget(mov)
- // ADD $(autosize+8), SP, R3
+ // ADD $(autosize+8), SP, R20
q = obj.Appendp(mov, c.newprog)
q.As = AADD
q.From.Type = obj.TYPE_CONST
q.From.Offset = int64(c.autosize) + 8
q.Reg = REGSP
q.To.Type = obj.TYPE_REG
- q.To.Reg = REG_R3
+ q.To.Reg = REG_R20
- // CMP R2, R3
+ // CMP RT2, R20
q = obj.Appendp(q, c.newprog)
q.As = ACMP
q.From.Type = obj.TYPE_REG
- q.From.Reg = REG_R2
- q.Reg = REG_R3
+ q.From.Reg = REGRT2
+ q.Reg = REG_R20
// BNE end
q = obj.Appendp(q, c.newprog)
@@ -708,22 +714,22 @@ func preprocess(ctxt *obj.Link, cursym *obj.LSym, newprog obj.ProgAlloc) {
q.To.Type = obj.TYPE_BRANCH
q.To.SetTarget(end)
- // ADD $8, SP, R4
+ // ADD $8, SP, R20
q = obj.Appendp(q, c.newprog)
q.As = AADD
q.From.Type = obj.TYPE_CONST
q.From.Offset = 8
q.Reg = REGSP
q.To.Type = obj.TYPE_REG
- q.To.Reg = REG_R4
+ q.To.Reg = REG_R20
- // MOV R4, panic_argp(R1)
+ // MOV R20, panic_argp(RT1)
q = obj.Appendp(q, c.newprog)
q.As = AMOVD
q.From.Type = obj.TYPE_REG
- q.From.Reg = REG_R4
+ q.From.Reg = REG_R20
q.To.Type = obj.TYPE_MEM
- q.To.Reg = REG_R1
+ q.To.Reg = REGRT1
q.To.Offset = 0 // Panic.argp
// B end
diff --git a/src/cmd/internal/obj/objfile.go b/src/cmd/internal/obj/objfile.go
index 24fb5a19de..01466ea736 100644
--- a/src/cmd/internal/obj/objfile.go
+++ b/src/cmd/internal/obj/objfile.go
@@ -452,6 +452,11 @@ func (w *writer) contentHash(s *LSym) goobj.HashType {
binary.LittleEndian.PutUint64(tmp[6:14], uint64(r.Add))
h.Write(tmp[:])
rs := r.Sym
+ if rs == nil {
+ fmt.Printf("symbol: %s\n", s)
+ fmt.Printf("relocation: %#v\n", r)
+ panic("nil symbol target in relocation")
+ }
switch rs.PkgIdx {
case goobj.PkgIdxHashed64:
h.Write([]byte{0})
diff --git a/src/cmd/internal/obj/textflag.go b/src/cmd/internal/obj/textflag.go
index 881e192203..5ae75027c2 100644
--- a/src/cmd/internal/obj/textflag.go
+++ b/src/cmd/internal/obj/textflag.go
@@ -49,8 +49,8 @@ const (
// Function can call reflect.Type.Method or reflect.Type.MethodByName.
REFLECTMETHOD = 1024
- // Function is the top of the call stack. Call stack unwinders should stop
- // at this function.
+ // Function is the outermost frame of the call stack. Call stack unwinders
+ // should stop at this function.
TOPFRAME = 2048
// Function is an ABI wrapper.
diff --git a/src/cmd/internal/obj/wasm/wasmobj.go b/src/cmd/internal/obj/wasm/wasmobj.go
index ceeae7a257..4d276db678 100644
--- a/src/cmd/internal/obj/wasm/wasmobj.go
+++ b/src/cmd/internal/obj/wasm/wasmobj.go
@@ -129,8 +129,6 @@ var (
morestackNoCtxt *obj.LSym
gcWriteBarrier *obj.LSym
sigpanic *obj.LSym
- deferreturn *obj.LSym
- jmpdefer *obj.LSym
)
const (
@@ -143,10 +141,6 @@ func instinit(ctxt *obj.Link) {
morestackNoCtxt = ctxt.Lookup("runtime.morestack_noctxt")
gcWriteBarrier = ctxt.LookupABI("runtime.gcWriteBarrier", obj.ABIInternal)
sigpanic = ctxt.LookupABI("runtime.sigpanic", obj.ABIInternal)
- deferreturn = ctxt.LookupABI("runtime.deferreturn", obj.ABIInternal)
- // jmpdefer is defined in assembly as ABI0. The compiler will
- // generate a direct ABI0 call from Go, so look for that.
- jmpdefer = ctxt.LookupABI(`"".jmpdefer`, obj.ABI0)
}
func preprocess(ctxt *obj.Link, s *obj.LSym, newprog obj.ProgAlloc) {
@@ -423,12 +417,6 @@ func preprocess(ctxt *obj.Link, s *obj.LSym, newprog obj.ProgAlloc) {
pcAfterCall-- // sigpanic expects to be called without advancing the pc
}
- // jmpdefer manipulates the return address on the stack so deferreturn gets called repeatedly.
- // Model this in WebAssembly with a loop.
- if call.To.Sym == deferreturn {
- p = appendp(p, ALoop)
- }
-
// SP -= 8
p = appendp(p, AGet, regAddr(REG_SP))
p = appendp(p, AI32Const, constAddr(8))
@@ -479,15 +467,6 @@ func preprocess(ctxt *obj.Link, s *obj.LSym, newprog obj.ProgAlloc) {
break
}
- // jmpdefer removes the frame of deferreturn from the Go stack.
- // However, its WebAssembly function still returns normally,
- // so we need to return from deferreturn without removing its
- // stack frame (no RET), because the frame is already gone.
- if call.To.Sym == jmpdefer {
- p = appendp(p, AReturn)
- break
- }
-
// return value of call is on the top of the stack, indicating whether to unwind the WebAssembly stack
if call.As == ACALLNORESUME && call.To.Sym != sigpanic { // sigpanic unwinds the stack, but it never resumes
// trying to unwind WebAssembly stack but call has no resume point, terminate with error
@@ -500,21 +479,6 @@ func preprocess(ctxt *obj.Link, s *obj.LSym, newprog obj.ProgAlloc) {
unwindExitBranches = append(unwindExitBranches, p)
}
- // jump to before the call if jmpdefer has reset the return address to the call's PC
- if call.To.Sym == deferreturn {
- // get PC_B from -8(SP)
- p = appendp(p, AGet, regAddr(REG_SP))
- p = appendp(p, AI32Const, constAddr(8))
- p = appendp(p, AI32Sub)
- p = appendp(p, AI32Load16U, constAddr(0))
- p = appendp(p, ATee, regAddr(REG_PC_B))
-
- p = appendp(p, AI32Const, constAddr(call.Pc))
- p = appendp(p, AI32Eq)
- p = appendp(p, ABrIf, constAddr(0))
- p = appendp(p, AEnd) // end of Loop
- }
-
case obj.ARET, ARETUNWIND:
ret := *p
p.As = obj.ANOP
diff --git a/src/cmd/internal/obj/x86/asm6.go b/src/cmd/internal/obj/x86/asm6.go
index 17fa76727e..331a98dfef 100644
--- a/src/cmd/internal/obj/x86/asm6.go
+++ b/src/cmd/internal/obj/x86/asm6.go
@@ -43,7 +43,6 @@ import (
var (
plan9privates *obj.LSym
- deferreturn *obj.LSym
)
// Instruction layout.
diff --git a/src/cmd/internal/obj/x86/obj6.go b/src/cmd/internal/obj/x86/obj6.go
index e2732d53e3..183ca2ebe9 100644
--- a/src/cmd/internal/obj/x86/obj6.go
+++ b/src/cmd/internal/obj/x86/obj6.go
@@ -35,7 +35,6 @@ import (
"cmd/internal/objabi"
"cmd/internal/src"
"cmd/internal/sys"
- "internal/buildcfg"
"log"
"math"
"path"
@@ -647,13 +646,12 @@ func preprocess(ctxt *obj.Link, cursym *obj.LSym, newprog obj.ProgAlloc) {
var regg int16
if !p.From.Sym.NoSplit() || p.From.Sym.Wrapper() {
- if ctxt.Arch.Family == sys.AMD64 && buildcfg.Experiment.RegabiG && cursym.ABI() == obj.ABIInternal {
+ if ctxt.Arch.Family == sys.AMD64 && cursym.ABI() == obj.ABIInternal {
regg = REGG // use the g register directly in ABIInternal
} else {
p = obj.Appendp(p, newprog)
regg = REG_CX
if ctxt.Arch.Family == sys.AMD64 {
- // Using this register means that stacksplit works w/ //go:registerparams even when !buildcfg.Experiment.RegabiG
regg = REGG // == REG_R14
}
p = load_g(ctxt, p, newprog, regg) // load g into regg
diff --git a/src/cmd/internal/objabi/funcid.go b/src/cmd/internal/objabi/funcid.go
index 93ebd7be94..68f6a26a76 100644
--- a/src/cmd/internal/objabi/funcid.go
+++ b/src/cmd/internal/objabi/funcid.go
@@ -34,7 +34,6 @@ const (
FuncID_gogo
FuncID_gopanic
FuncID_handleAsyncEvent
- FuncID_jmpdefer
FuncID_mcall
FuncID_morestack
FuncID_mstart
@@ -60,7 +59,6 @@ var funcIDs = map[string]FuncID{
"gogo": FuncID_gogo,
"gopanic": FuncID_gopanic,
"handleAsyncEvent": FuncID_handleAsyncEvent,
- "jmpdefer": FuncID_jmpdefer,
"main": FuncID_runtime_main,
"mcall": FuncID_mcall,
"morestack": FuncID_morestack,
@@ -74,7 +72,6 @@ var funcIDs = map[string]FuncID{
// Don't show in call stack but otherwise not special.
"deferreturn": FuncID_wrapper,
"runOpenDeferFrame": FuncID_wrapper,
- "reflectcallSave": FuncID_wrapper,
"deferCallSave": FuncID_wrapper,
}
diff --git a/src/cmd/link/internal/ld/deadcode.go b/src/cmd/link/internal/ld/deadcode.go
index 416e5da398..dd5dafc21b 100644
--- a/src/cmd/link/internal/ld/deadcode.go
+++ b/src/cmd/link/internal/ld/deadcode.go
@@ -408,6 +408,9 @@ func (d *deadcodePass) decodeMethodSig(ldr *loader.Loader, arch *sys.Arch, symId
// Decode the method of interface type symbol symIdx at offset off.
func (d *deadcodePass) decodeIfaceMethod(ldr *loader.Loader, arch *sys.Arch, symIdx loader.Sym, off int64) methodsig {
p := ldr.Data(symIdx)
+ if p == nil {
+ panic(fmt.Sprintf("missing symbol %q", ldr.SymName(symIdx)))
+ }
if decodetypeKind(arch, p)&kindMask != kindInterface {
panic(fmt.Sprintf("symbol %q is not an interface", ldr.SymName(symIdx)))
}
diff --git a/src/cmd/link/internal/ld/dwarf.go b/src/cmd/link/internal/ld/dwarf.go
index c53d2408cb..98ab248e13 100644
--- a/src/cmd/link/internal/ld/dwarf.go
+++ b/src/cmd/link/internal/ld/dwarf.go
@@ -1190,7 +1190,7 @@ func (d *dwctxt) writeDirFileTables(unit *sym.CompilationUnit, lsu *loader.Symbo
// We can't use something that may be dead-code
// eliminated from a binary here. proc.go contains
// main and the scheduler, so it's not going anywhere.
- if i := strings.Index(name, "runtime/proc.go"); i >= 0 {
+ if i := strings.Index(name, "runtime/proc.go"); i >= 0 && unit.Lib.Pkg == "runtime" {
d.dwmu.Lock()
if gdbscript == "" {
k := strings.Index(name, "runtime/proc.go")
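
The added unit.Lib.Pkg == "runtime" guard means the .debug_gdb_scripts source can only come from the runtime package itself; a user file whose path merely contains "runtime/proc.go" no longer qualifies. A small sketch of the combined test (the helper name is illustrative):

    package main

    import (
        "fmt"
        "strings"
    )

    // isRuntimeProc mirrors the guarded match: the path check alone is no
    // longer enough, the compilation unit must belong to package runtime.
    func isRuntimeProc(unitPkg, fileName string) bool {
        return strings.Contains(fileName, "runtime/proc.go") && unitPkg == "runtime"
    }

    func main() {
        fmt.Println(isRuntimeProc("runtime", "/goroot/src/runtime/proc.go")) // true
        fmt.Println(isRuntimeProc("example", "/tmp/mirror/runtime/proc.go")) // false
    }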
diff --git a/src/cmd/link/internal/ld/dwarf_test.go b/src/cmd/link/internal/ld/dwarf_test.go
index 2f59c2fe0a..543dd5caac 100644
--- a/src/cmd/link/internal/ld/dwarf_test.go
+++ b/src/cmd/link/internal/ld/dwarf_test.go
@@ -101,8 +101,11 @@ func gobuild(t *testing.T, dir string, testfile string, gcflags string) *builtFi
}
cmd := exec.Command(testenv.GoToolPath(t), "build", gcflags, "-o", dst, src)
- if b, err := cmd.CombinedOutput(); err != nil {
- t.Logf("build: %s\n", b)
+ b, err := cmd.CombinedOutput()
+ if len(b) != 0 {
+ t.Logf("## build output:\n%s", b)
+ }
+ if err != nil {
t.Fatalf("build error: %v", err)
}
diff --git a/src/cmd/link/internal/ld/elf.go b/src/cmd/link/internal/ld/elf.go
index 81011638bc..93f974c5e0 100644
--- a/src/cmd/link/internal/ld/elf.go
+++ b/src/cmd/link/internal/ld/elf.go
@@ -16,6 +16,7 @@ import (
"fmt"
"internal/buildcfg"
"path/filepath"
+ "runtime"
"sort"
"strings"
)
@@ -480,10 +481,6 @@ func Elfwritedynent(arch *sys.Arch, s *loader.SymbolBuilder, tag elf.DynTag, val
}
}
-func elfwritedynentsym(ctxt *Link, s *loader.SymbolBuilder, tag elf.DynTag, t loader.Sym) {
- Elfwritedynentsymplus(ctxt, s, tag, t, 0)
-}
-
func Elfwritedynentsymplus(ctxt *Link, s *loader.SymbolBuilder, tag elf.DynTag, t loader.Sym, add int64) {
if elf64 {
s.AddUint64(ctxt.Arch, uint64(tag))
@@ -1472,24 +1469,24 @@ func (ctxt *Link) doelf() {
/*
* .dynamic table
*/
- elfwritedynentsym(ctxt, dynamic, elf.DT_HASH, hash.Sym())
+ elfWriteDynEntSym(ctxt, dynamic, elf.DT_HASH, hash.Sym())
- elfwritedynentsym(ctxt, dynamic, elf.DT_SYMTAB, dynsym.Sym())
+ elfWriteDynEntSym(ctxt, dynamic, elf.DT_SYMTAB, dynsym.Sym())
if elf64 {
Elfwritedynent(ctxt.Arch, dynamic, elf.DT_SYMENT, ELF64SYMSIZE)
} else {
Elfwritedynent(ctxt.Arch, dynamic, elf.DT_SYMENT, ELF32SYMSIZE)
}
- elfwritedynentsym(ctxt, dynamic, elf.DT_STRTAB, dynstr.Sym())
+ elfWriteDynEntSym(ctxt, dynamic, elf.DT_STRTAB, dynstr.Sym())
elfwritedynentsymsize(ctxt, dynamic, elf.DT_STRSZ, dynstr.Sym())
if elfRelType == ".rela" {
rela := ldr.LookupOrCreateSym(".rela", 0)
- elfwritedynentsym(ctxt, dynamic, elf.DT_RELA, rela)
+ elfWriteDynEntSym(ctxt, dynamic, elf.DT_RELA, rela)
elfwritedynentsymsize(ctxt, dynamic, elf.DT_RELASZ, rela)
Elfwritedynent(ctxt.Arch, dynamic, elf.DT_RELAENT, ELF64RELASIZE)
} else {
rel := ldr.LookupOrCreateSym(".rel", 0)
- elfwritedynentsym(ctxt, dynamic, elf.DT_REL, rel)
+ elfWriteDynEntSym(ctxt, dynamic, elf.DT_REL, rel)
elfwritedynentsymsize(ctxt, dynamic, elf.DT_RELSZ, rel)
Elfwritedynent(ctxt.Arch, dynamic, elf.DT_RELENT, ELF32RELSIZE)
}
@@ -1499,9 +1496,9 @@ func (ctxt *Link) doelf() {
}
if ctxt.IsPPC64() {
- elfwritedynentsym(ctxt, dynamic, elf.DT_PLTGOT, plt.Sym())
+ elfWriteDynEntSym(ctxt, dynamic, elf.DT_PLTGOT, plt.Sym())
} else {
- elfwritedynentsym(ctxt, dynamic, elf.DT_PLTGOT, gotplt.Sym())
+ elfWriteDynEntSym(ctxt, dynamic, elf.DT_PLTGOT, gotplt.Sym())
}
if ctxt.IsPPC64() {
@@ -1749,7 +1746,7 @@ func asmbElf(ctxt *Link) {
sh.Flags = uint64(elf.SHF_ALLOC)
sh.Addralign = 1
- if interpreter == "" && buildcfg.GO_LDSO != "" {
+ if interpreter == "" && buildcfg.GOOS == runtime.GOOS && buildcfg.GOARCH == runtime.GOARCH && buildcfg.GO_LDSO != "" {
interpreter = buildcfg.GO_LDSO
}
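
With the extra GOOS/GOARCH comparison, the GO_LDSO value baked in at build time is only consulted for native links and no longer leaks into cross-compiled binaries. A hedged sketch of the condition (the helper name is made up):

    package main

    import (
        "fmt"
        "runtime"
    )

    // defaultInterp returns a baked-in ELF interpreter only when the
    // configured target matches the machine doing the link; cross-compiles
    // fall back to the target's own default.
    func defaultInterp(targetOS, targetArch, goLDSO string) string {
        if targetOS == runtime.GOOS && targetArch == runtime.GOARCH {
            return goLDSO
        }
        return ""
    }

    func main() {
        fmt.Println(defaultInterp(runtime.GOOS, runtime.GOARCH, "/lib/ld-musl-x86_64.so.1"))
        fmt.Println(defaultInterp("plan9", "arm", "/lib/ld-musl-x86_64.so.1") == "") // true
    }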
diff --git a/src/cmd/link/internal/ld/lib.go b/src/cmd/link/internal/ld/lib.go
index 4cfee4a1e7..494fea5e72 100644
--- a/src/cmd/link/internal/ld/lib.go
+++ b/src/cmd/link/internal/ld/lib.go
@@ -698,7 +698,9 @@ func (ctxt *Link) linksetup() {
Peinit(ctxt)
}
- if ctxt.HeadType == objabi.Hdarwin && ctxt.LinkMode == LinkExternal {
+ if ctxt.LinkMode == LinkExternal {
+ // When external linking, we are creating an object file. The
+ // absolute address is irrelevant.
*FlagTextAddr = 0
}
diff --git a/src/cmd/link/internal/ld/pcln.go b/src/cmd/link/internal/ld/pcln.go
index 05fd302369..70e3e1284b 100644
--- a/src/cmd/link/internal/ld/pcln.go
+++ b/src/cmd/link/internal/ld/pcln.go
@@ -129,11 +129,10 @@ func computeDeferReturn(ctxt *Link, deferReturnSym, s loader.Sym) uint32 {
for ri := 0; ri < relocs.Count(); ri++ {
r := relocs.At(ri)
if target.IsWasm() && r.Type() == objabi.R_ADDR {
- // Wasm does not have a live variable set at the deferreturn
- // call itself. Instead it has one identified by the
- // resumption point immediately preceding the deferreturn.
- // The wasm code has a R_ADDR relocation which is used to
- // set the resumption point to PC_B.
+ // wasm/ssa.go generates an ARESUMEPOINT just
+ // before the deferreturn call. The "PC" of
+ // the deferreturn call is stored in the
+ // R_ADDR relocation on the ARESUMEPOINT.
lastWasmAddr = uint32(r.Add())
}
if r.Type().IsDirectCall() && (r.Sym() == deferReturnSym || ldr.IsDeferReturnTramp(r.Sym())) {
diff --git a/src/cmd/link/internal/ld/pe.go b/src/cmd/link/internal/ld/pe.go
index 8eb4231c3a..871bf8de2b 100644
--- a/src/cmd/link/internal/ld/pe.go
+++ b/src/cmd/link/internal/ld/pe.go
@@ -1061,6 +1061,8 @@ func Peinit(ctxt *Link) {
// linker will honour that requirement.
PESECTALIGN = 32
PEFILEALIGN = 0
+ // We are creating an object file. The absolute address is irrelevant.
+ PEBASE = 0
}
var sh [16]pe.SectionHeader32
diff --git a/src/cmd/link/internal/ld/symtab.go b/src/cmd/link/internal/ld/symtab.go
index 00f557875a..1f5e333cfd 100644
--- a/src/cmd/link/internal/ld/symtab.go
+++ b/src/cmd/link/internal/ld/symtab.go
@@ -300,6 +300,7 @@ func putplan9sym(ctxt *Link, ldr *loader.Loader, s loader.Sym, char SymbolType)
ctxt.Out.Write8(uint8(t + 0x80)) /* 0x80 is variable length */
name := ldr.SymName(s)
+ name = mangleABIName(ctxt, ldr, s, name)
ctxt.Out.WriteString(name)
ctxt.Out.Write8(0)
diff --git a/src/cmd/link/internal/loadelf/ldelf.go b/src/cmd/link/internal/loadelf/ldelf.go
index c6956297f6..b4f565a153 100644
--- a/src/cmd/link/internal/loadelf/ldelf.go
+++ b/src/cmd/link/internal/loadelf/ldelf.go
@@ -22,7 +22,7 @@ import (
/*
Derived from Plan 9 from User Space's src/libmach/elf.h, elf.c
-http://code.swtch.com/plan9port/src/tip/src/libmach/
+https://github.com/9fans/plan9port/tree/master/src/libmach/
Copyright © 2004 Russ Cox.
Portions Copyright © 2008-2010 Google Inc.
diff --git a/src/cmd/link/internal/loadmacho/ldmacho.go b/src/cmd/link/internal/loadmacho/ldmacho.go
index e7d9eebc33..5402ecd748 100644
--- a/src/cmd/link/internal/loadmacho/ldmacho.go
+++ b/src/cmd/link/internal/loadmacho/ldmacho.go
@@ -18,7 +18,7 @@ import (
/*
Derived from Plan 9 from User Space's src/libmach/elf.h, elf.c
-http://code.swtch.com/plan9port/src/tip/src/libmach/
+https://github.com/9fans/plan9port/tree/master/src/libmach/
Copyright © 2004 Russ Cox.
Portions Copyright © 2008-2010 Google Inc.
diff --git a/src/cmd/link/link_test.go b/src/cmd/link/link_test.go
index 77d42cceda..2b0b2dc4a1 100644
--- a/src/cmd/link/link_test.go
+++ b/src/cmd/link/link_test.go
@@ -547,14 +547,13 @@ const testFuncAlignSrc = `
package main
import (
"fmt"
- "reflect"
)
func alignPc()
+var alignPcFnAddr uintptr
func main() {
- addr := reflect.ValueOf(alignPc).Pointer()
- if (addr % 512) != 0 {
- fmt.Printf("expected 512 bytes alignment, got %v\n", addr)
+ if alignPcFnAddr % 512 != 0 {
+ fmt.Printf("expected 512 bytes alignment, got %v\n", alignPcFnAddr)
} else {
fmt.Printf("PASS")
}
@@ -569,6 +568,9 @@ TEXT ·alignPc(SB),NOSPLIT, $0-0
PCALIGN $512
MOVD $3, R1
RET
+
+GLOBL ·alignPcFnAddr(SB),RODATA,$8
+DATA ·alignPcFnAddr(SB)/8,$·alignPc(SB)
`
// TestFuncAlign verifies that the address of a function can be aligned
diff --git a/src/cmd/trace/annotations.go b/src/cmd/trace/annotations.go
index 9b45457436..1c0dad56d8 100644
--- a/src/cmd/trace/annotations.go
+++ b/src/cmd/trace/annotations.go
@@ -407,10 +407,7 @@ func (tasks allTasks) task(taskID uint64) *taskDesc {
return t
}
- t = &taskDesc{
- id: taskID,
- goroutines: make(map[uint64]struct{}),
- }
+ t = newTaskDesc(taskID)
tasks[taskID] = t
return t
}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
index eb0016b18f..7b82d0b6dd 100644
--- a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
+++ b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
@@ -51,6 +51,11 @@ type asmArch struct {
bigEndian bool
stack string
lr bool
+ // retRegs is a list of registers for return value in register ABI (ABIInternal).
+ // For now, as we only check whether we write to any result, here we only need to
+ // include the first integer register and first floating-point register. Accessing
+ // any of them counts as writing to result.
+ retRegs []string
// calculated during initialization
sizes types.Sizes
intSize int
@@ -79,8 +84,8 @@ type asmVar struct {
var (
asmArch386 = asmArch{name: "386", bigEndian: false, stack: "SP", lr: false}
asmArchArm = asmArch{name: "arm", bigEndian: false, stack: "R13", lr: true}
- asmArchArm64 = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true}
- asmArchAmd64 = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false}
+ asmArchArm64 = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true, retRegs: []string{"R0", "F0"}}
+ asmArchAmd64 = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false, retRegs: []string{"AX", "X0"}}
asmArchMips = asmArch{name: "mips", bigEndian: true, stack: "R29", lr: true}
asmArchMipsLE = asmArch{name: "mipsle", bigEndian: false, stack: "R29", lr: true}
asmArchMips64 = asmArch{name: "mips64", bigEndian: true, stack: "R29", lr: true}
@@ -137,7 +142,7 @@ var (
asmSP = re(`[^+\-0-9](([0-9]+)\(([A-Z0-9]+)\))`)
asmOpcode = re(`^\s*(?:[A-Z0-9a-z_]+:)?\s*([A-Z]+)\s*([^,]*)(?:,\s*(.*))?`)
ppc64Suff = re(`([BHWD])(ZU|Z|U|BR)?$`)
- abiSuff = re(`^(.+)<ABI.+>$`)
+ abiSuff = re(`^(.+)<(ABI.+)>$`)
)
func run(pass *analysis.Pass) (interface{}, error) {
@@ -185,6 +190,7 @@ Files:
var (
fn *asmFunc
fnName string
+ abi string
localSize, argSize int
wroteSP bool
noframe bool
@@ -195,18 +201,22 @@ Files:
flushRet := func() {
if fn != nil && fn.vars["ret"] != nil && !haveRetArg && len(retLine) > 0 {
v := fn.vars["ret"]
+ resultStr := fmt.Sprintf("%d-byte ret+%d(FP)", v.size, v.off)
+ if abi == "ABIInternal" {
+ resultStr = "result register"
+ }
for _, line := range retLine {
- pass.Reportf(analysisutil.LineStart(tf, line), "[%s] %s: RET without writing to %d-byte ret+%d(FP)", arch, fnName, v.size, v.off)
+ pass.Reportf(analysisutil.LineStart(tf, line), "[%s] %s: RET without writing to %s", arch, fnName, resultStr)
}
}
retLine = nil
}
- trimABI := func(fnName string) string {
+ trimABI := func(fnName string) (string, string) {
m := abiSuff.FindStringSubmatch(fnName)
if m != nil {
- return m[1]
+ return m[1], m[2]
}
- return fnName
+ return fnName, ""
}
for lineno, line := range lines {
lineno++
@@ -273,11 +283,12 @@ Files:
// log.Printf("%s:%d: [%s] cannot check cross-package assembly function: %s is in package %s", fname, lineno, arch, fnName, pkgPath)
fn = nil
fnName = ""
+ abi = ""
continue
}
}
// Trim off optional ABI selector.
- fnName := trimABI(fnName)
+ fnName, abi = trimABI(fnName)
flag := m[3]
fn = knownFunc[fnName][arch]
if fn != nil {
@@ -305,6 +316,7 @@ Files:
flushRet()
fn = nil
fnName = ""
+ abi = ""
continue
}
@@ -335,6 +347,15 @@ Files:
haveRetArg = true
}
+ if abi == "ABIInternal" && !haveRetArg {
+ for _, reg := range archDef.retRegs {
+ if strings.Contains(line, reg) {
+ haveRetArg = true
+ break
+ }
+ }
+ }
+
for _, m := range asmSP.FindAllStringSubmatch(line, -1) {
if m[3] != archDef.stack || wroteSP || noframe {
continue
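
Taken together, the asmdecl changes teach vet about register-ABI assembly: the abiSuff regexp now captures the ABI selector, and for an ABIInternal function a write to the first integer or floating-point result register (AX/X0 on amd64, R0/F0 on arm64) satisfies the "RET without writing to result" check. A runnable sketch of just the name parsing:

    package main

    import (
        "fmt"
        "regexp"
    )

    var abiSuff = regexp.MustCompile(`^(.+)<(ABI.+)>$`)

    // trimABI splits an assembly symbol name into the bare name and its
    // optional ABI selector, as the analyzer now does.
    func trimABI(fnName string) (name, abi string) {
        if m := abiSuff.FindStringSubmatch(fnName); m != nil {
            return m[1], m[2]
        }
        return fnName, ""
    }

    func main() {
        fmt.Println(trimABI("runtime.gcWriteBarrier<ABIInternal>")) // runtime.gcWriteBarrier ABIInternal
        fmt.Println(trimABI("runtime.gcWriteBarrier"))              // bare name, empty ABI
    }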
diff --git a/src/cmd/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/src/cmd/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
index b949fc8407..5fe75b14c7 100644
--- a/src/cmd/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
+++ b/src/cmd/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
@@ -439,8 +439,10 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.
}
default:
- if typeparams.IsListExpr(n) {
- a.applyList(n, "ElemList")
+ if ix := typeparams.GetIndexExprData(n); ix != nil {
+ a.apply(n, "X", nil, ix.X)
+ // *ast.IndexExpr was handled above, so n must be an *ast.MultiIndexExpr.
+ a.applyList(n, "Indices")
} else {
panic(fmt.Sprintf("Apply: unexpected node type %T", n))
}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go b/src/cmd/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go
index cffd7acbee..81e8fdcf0c 100644
--- a/src/cmd/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go
+++ b/src/cmd/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go
@@ -58,7 +58,7 @@ type Path string
// - The only OT operator is Object.Type,
// which we encode as '.' because dot cannot appear in an identifier.
// - The TT operators are encoded as [EKPRU].
-// - The OT operators are encoded as [AFMO];
+// - The TO operators are encoded as [AFMO];
// three of these (At,Field,Method) require an integer operand,
// which is encoded as a string of decimal digits.
// These indices are stable across different representations
diff --git a/src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/input.go b/src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/input.go
index ac377035ec..c1038163f1 100644
--- a/src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/input.go
+++ b/src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/input.go
@@ -27,23 +27,23 @@ const (
// RuneRoles detects the roles of each byte rune in an input string and stores it in the output
// slice. The rune role depends on the input type. Stops when it parsed all the runes in the string
// or when it filled the output. If output is nil, then it gets created.
-func RuneRoles(str string, reuse []RuneRole) []RuneRole {
+func RuneRoles(candidate []byte, reuse []RuneRole) []RuneRole {
var output []RuneRole
- if cap(reuse) < len(str) {
- output = make([]RuneRole, 0, len(str))
+ if cap(reuse) < len(candidate) {
+ output = make([]RuneRole, 0, len(candidate))
} else {
output = reuse[:0]
}
prev, prev2 := rtNone, rtNone
- for i := 0; i < len(str); i++ {
- r := rune(str[i])
+ for i := 0; i < len(candidate); i++ {
+ r := rune(candidate[i])
role := RNone
curr := rtLower
- if str[i] <= unicode.MaxASCII {
- curr = runeType(rt[str[i]] - '0')
+ if candidate[i] <= unicode.MaxASCII {
+ curr = runeType(rt[candidate[i]] - '0')
}
if curr == rtLower {
@@ -58,7 +58,7 @@ func RuneRoles(str string, reuse []RuneRole) []RuneRole {
if prev == rtUpper {
// This and previous characters are both upper case.
- if i+1 == len(str) {
+ if i+1 == len(candidate) {
// This is last character, previous was also uppercase -> this is UCTail
// i.e., (current char is C): aBC / BC / ABC
role = RUCTail
@@ -118,11 +118,26 @@ func LastSegment(input string, roles []RuneRole) string {
return input[start+1 : end+1]
}
-// ToLower transforms the input string to lower case, which is stored in the output byte slice.
+// fromChunks copies string chunks into the given buffer.
+func fromChunks(chunks []string, buffer []byte) []byte {
+ ii := 0
+ for _, chunk := range chunks {
+ for i := 0; i < len(chunk); i++ {
+ if ii >= cap(buffer) {
+ break
+ }
+ buffer[ii] = chunk[i]
+ ii++
+ }
+ }
+ return buffer[:ii]
+}
+
+// toLower transforms the input string to lower case, which is stored in the output byte slice.
// The lower casing considers only ASCII values - non ASCII values are left unmodified.
// Stops when parsed all input or when it filled the output slice. If output is nil, then it gets
// created.
-func ToLower(input string, reuse []byte) []byte {
+func toLower(input []byte, reuse []byte) []byte {
output := reuse
if cap(reuse) < len(input) {
output = make([]byte, len(input))
@@ -130,7 +145,7 @@ func ToLower(input string, reuse []byte) []byte {
for i := 0; i < len(input); i++ {
r := rune(input[i])
- if r <= unicode.MaxASCII {
+ if input[i] <= unicode.MaxASCII {
if 'A' <= r && r <= 'Z' {
r += 'a' - 'A'
}
diff --git a/src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/matcher.go b/src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/matcher.go
index 16a643097d..265cdcf160 100644
--- a/src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/matcher.go
+++ b/src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/matcher.go
@@ -51,8 +51,12 @@ type Matcher struct {
lastCandidateLen int // in bytes
lastCandidateMatched bool
- // Here we save the last candidate in lower-case. This is basically a byte slice we reuse for
- // performance reasons, so the slice is not reallocated for every candidate.
+ // Reusable buffers to avoid allocating for every candidate.
+ // - inputBuf stores the concatenated input chunks
+ // - lowerBuf stores the last candidate in lower-case
+ // - rolesBuf stores the calculated roles for each rune in the last
+ // candidate.
+ inputBuf [MaxInputSize]byte
lowerBuf [MaxInputSize]byte
rolesBuf [MaxInputSize]RuneRole
}
@@ -72,7 +76,7 @@ func NewMatcher(pattern string) *Matcher {
m := &Matcher{
pattern: pattern,
- patternLower: ToLower(pattern, nil),
+ patternLower: toLower([]byte(pattern), nil),
}
for i, c := range m.patternLower {
@@ -88,7 +92,7 @@ func NewMatcher(pattern string) *Matcher {
m.patternShort = m.patternLower
}
- m.patternRoles = RuneRoles(pattern, nil)
+ m.patternRoles = RuneRoles([]byte(pattern), nil)
if len(pattern) > 0 {
maxCharScore := 4
@@ -102,10 +106,15 @@ func NewMatcher(pattern string) *Matcher {
// This is not designed for parallel use. Multiple candidates must be scored sequentially.
// Returns a score between 0 and 1 (0 - no match, 1 - perfect match).
func (m *Matcher) Score(candidate string) float32 {
+ return m.ScoreChunks([]string{candidate})
+}
+
+func (m *Matcher) ScoreChunks(chunks []string) float32 {
+ candidate := fromChunks(chunks, m.inputBuf[:])
if len(candidate) > MaxInputSize {
candidate = candidate[:MaxInputSize]
}
- lower := ToLower(candidate, m.lowerBuf[:])
+ lower := toLower(candidate, m.lowerBuf[:])
m.lastCandidateLen = len(candidate)
if len(m.pattern) == 0 {
@@ -174,7 +183,7 @@ func (m *Matcher) MatchedRanges() []int {
return ret
}
-func (m *Matcher) match(candidate string, candidateLower []byte) bool {
+func (m *Matcher) match(candidate []byte, candidateLower []byte) bool {
i, j := 0, 0
for ; i < len(candidateLower) && j < len(m.patternLower); i++ {
if candidateLower[i] == m.patternLower[j] {
@@ -192,7 +201,7 @@ func (m *Matcher) match(candidate string, candidateLower []byte) bool {
return true
}
-func (m *Matcher) computeScore(candidate string, candidateLower []byte) int {
+func (m *Matcher) computeScore(candidate []byte, candidateLower []byte) int {
pattLen, candLen := len(m.pattern), len(candidate)
for j := 0; j <= len(m.pattern); j++ {
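
ScoreChunks and fromChunks let a caller score a candidate that is already stored in pieces (say, a package path plus an identifier) without first concatenating strings: the pieces are flattened into the matcher's fixed inputBuf. A standalone sketch of that flattening step (illustrative name, not the vendored API):

    package main

    import "fmt"

    // flatten copies string chunks into a fixed reusable buffer, truncating
    // rather than allocating, in the spirit of fromChunks above.
    func flatten(chunks []string, buf []byte) []byte {
        n := 0
        for _, c := range chunks {
            for i := 0; i < len(c) && n < cap(buf); i++ {
                buf[n] = c[i]
                n++
            }
        }
        return buf[:n]
    }

    func main() {
        var buf [64]byte
        fmt.Printf("%q\n", flatten([]string{"example.com/pkg.", "Symbol"}, buf[:]))
    }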
diff --git a/src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/symbol.go b/src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/symbol.go
new file mode 100644
index 0000000000..062f491fb5
--- /dev/null
+++ b/src/cmd/vendor/golang.org/x/tools/internal/lsp/fuzzy/symbol.go
@@ -0,0 +1,224 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package fuzzy
+
+import (
+ "unicode"
+)
+
+// SymbolMatcher implements a fuzzy matching algorithm optimized for Go symbols
+// of the form:
+// example.com/path/to/package.object.field
+//
+// Knowing that we are matching symbols like this allows us to make the
+// following optimizations:
+// - We can incorporate right-to-left relevance directly into the score
+// calculation.
+// - We can match from right to left, discarding leading bytes if the input is
+// too long.
+// - We just take the right-most match without losing too much precision. This
+// allows us to use an O(n) algorithm.
+// - We can operate directly on chunked strings; in many cases we will
+// be storing the package path and/or package name separately from the
+// symbol or identifiers, so doing this avoids allocating strings.
+// - We can return the index of the right-most match, allowing us to trim
+// irrelevant qualification.
+//
+// This implementation is experimental, serving as a reference fast algorithm
+// to compare to the fuzzy algorithm implemented by Matcher.
+type SymbolMatcher struct {
+ // Using buffers of length 256 is both a reasonable size for most qualified
+ // symbols, and makes it easy to avoid bounds checks by using uint8 indexes.
+ pattern [256]rune
+ patternLen uint8
+ inputBuffer [256]rune // avoid allocating when considering chunks
+ roles [256]uint32 // which roles does a rune play (word start, etc.)
+ segments [256]uint8 // how many segments from the right is each rune
+}
+
+const (
+ segmentStart uint32 = 1 << iota
+ wordStart
+ separator
+)
+
+// NewSymbolMatcher creates a SymbolMatcher that may be used to match the given
+// search pattern.
+//
+// Currently this matcher only accepts case-insensitive fuzzy patterns.
+//
+// TODO(rfindley):
+// - implement smart-casing
+// - implement space-separated groups
+// - implement ', ^, and $ modifiers
+//
+// An empty pattern matches no input.
+func NewSymbolMatcher(pattern string) *SymbolMatcher {
+ m := &SymbolMatcher{}
+ for _, p := range pattern {
+ m.pattern[m.patternLen] = unicode.ToLower(p)
+ m.patternLen++
+ if m.patternLen == 255 || int(m.patternLen) == len(pattern) {
+ // break at 255 so that we can represent patternLen with a uint8.
+ break
+ }
+ }
+ return m
+}
+
+// Match looks for the right-most match of the search pattern within the symbol
+// represented by concatenating the given chunks, returning its offset and
+// score.
+//
+// If a match is found, the first return value will hold the absolute byte
+// offset within all chunks for the start of the symbol. In other words, the
+// index of the match within strings.Join(chunks, ""). If no match is found,
+// the first return value will be -1.
+//
+// The second return value will be the score of the match, which is always
+// between 0 and 1, inclusive. A score of 0 indicates no match.
+func (m *SymbolMatcher) Match(chunks []string) (int, float64) {
+ // Explicit behavior for an empty pattern.
+ //
+ // As a minor optimization, this also avoids nilness checks later on, since
+ // the compiler can prove that m != nil.
+ if m.patternLen == 0 {
+ return -1, 0
+ }
+
+ // First phase: populate the input buffer with lower-cased runes.
+ //
+ // We could also check for a forward match here, but since we'd have to write
+ // the entire input anyway this has negligible impact on performance.
+
+ var (
+ inputLen = uint8(0)
+ modifiers = wordStart | segmentStart
+ )
+
+input:
+ for _, chunk := range chunks {
+ for _, r := range chunk {
+ if r == '.' || r == '/' {
+ modifiers |= separator
+ }
+ // optimization: avoid calls to unicode.ToLower, which can't be inlined.
+ l := r
+ if r <= unicode.MaxASCII {
+ if 'A' <= r && r <= 'Z' {
+ l = r + 'a' - 'A'
+ }
+ } else {
+ l = unicode.ToLower(r)
+ }
+ if l != r {
+ modifiers |= wordStart
+ }
+ m.inputBuffer[inputLen] = l
+ m.roles[inputLen] = modifiers
+ inputLen++
+ if m.roles[inputLen-1]&separator != 0 {
+ modifiers = wordStart | segmentStart
+ } else {
+ modifiers = 0
+ }
+ // TODO: we should prefer the right-most input if it overflows, rather
+ // than the left-most as we're doing here.
+ if inputLen == 255 {
+ break input
+ }
+ }
+ }
+
+ // Second phase: find the right-most match, and count segments from the
+ // right.
+
+ var (
+ pi = uint8(m.patternLen - 1) // pattern index
+ p = m.pattern[pi] // pattern rune
+ start = -1 // start offset of match
+ rseg = uint8(0)
+ )
+ const maxSeg = 3 // maximum number of segments from the right to count, for scoring purposes.
+
+ for ii := inputLen - 1; ; ii-- {
+ r := m.inputBuffer[ii]
+ if rseg < maxSeg && m.roles[ii]&separator != 0 {
+ rseg++
+ }
+ m.segments[ii] = rseg
+ if p == r {
+ if pi == 0 {
+ start = int(ii)
+ break
+ }
+ pi--
+ p = m.pattern[pi]
+ }
+ // Don't check ii >= 0 in the loop condition: ii is a uint8.
+ if ii == 0 {
+ break
+ }
+ }
+
+ if start < 0 {
+ // no match: skip scoring
+ return -1, 0
+ }
+
+ // Third phase: find the shortest match, and compute the score.
+
+ // Score is the average score for each character.
+ //
+ // A character score is the multiple of:
+ // 1. 1.0 if the character starts a segment, .8 if the character starts a
+ // 1. 1.0 if the character starts a segment, .8 if the character starts a
+ // mid-segment word, otherwise 0.6. This carries over to immediately
+ // following characters.
+ // 2. 1.0 if the character is part of the last segment, otherwise
+ // 1.0-.2*<segments from the right>, with a max segment count of 3.
+ //
+ // This is a very naive algorithm, but it is fast. There's lots of prior art
+ // here, and we should leverage it. For example, we could explicitly consider
+ // character distance, and exact matches of words or segments.
+ //
+ // Also note that this might not actually find the highest scoring match, as
+ // doing so could require a non-linear algorithm, depending on how the score
+ // is calculated.
+
+ pi = 0
+ p = m.pattern[pi]
+
+ const (
+ segStreak = 1.0
+ wordStreak = 0.8
+ noStreak = 0.6
+ perSegment = 0.2 // we count at most 3 segments above
+ )
+
+ streakBonus := noStreak
+ totScore := 0.0
+ for ii := uint8(start); ii < inputLen; ii++ {
+ r := m.inputBuffer[ii]
+ if r == p {
+ pi++
+ p = m.pattern[pi]
+ // Note: this could be optimized with some bit operations.
+ switch {
+ case m.roles[ii]&segmentStart != 0 && segStreak > streakBonus:
+ streakBonus = segStreak
+ case m.roles[ii]&wordStart != 0 && wordStreak > streakBonus:
+ streakBonus = wordStreak
+ }
+ totScore += streakBonus * (1.0 - float64(m.segments[ii])*perSegment)
+ if pi >= m.patternLen {
+ break
+ }
+ } else {
+ streakBonus = noStreak
+ }
+ }
+
+ return start, totScore / float64(m.patternLen)
+}
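
SymbolMatcher lives in an internal x/tools package (gopls uses it for workspace symbol search), so it cannot be imported from outside; the sketch below is hypothetical in-package usage of the Match API added above:

    package fuzzy

    import "fmt"

    // Hypothetical usage sketch: the candidate symbol is supplied as chunks,
    // here a package-path chunk and an identifier chunk.
    func ExampleSymbolMatcher() {
        m := NewSymbolMatcher("dtt")
        offset, score := m.Match([]string{"example.com/path/to/pkg.", "DoTheThing"})
        fmt.Println(offset >= 0, score > 0) // the pattern matches inside the last segment
    }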
diff --git a/src/cmd/vendor/golang.org/x/tools/internal/typeparams/doc.go b/src/cmd/vendor/golang.org/x/tools/internal/typeparams/common.go
index 5583947e21..9fc6b4beb8 100644
--- a/src/cmd/vendor/golang.org/x/tools/internal/typeparams/doc.go
+++ b/src/cmd/vendor/golang.org/x/tools/internal/typeparams/common.go
@@ -9,3 +9,17 @@
// This package exists to make it easier for tools to work with generic code,
// while also compiling against older Go versions.
package typeparams
+
+import (
+ "go/ast"
+ "go/token"
+)
+
+// An IndexExprData holds data from both ast.IndexExpr and the new
+// ast.MultiIndexExpr, which was introduced in Go 1.18.
+type IndexExprData struct {
+ X ast.Expr // expression
+ Lbrack token.Pos // position of "["
+ Indices []ast.Expr // index expressions
+ Rbrack token.Pos // position of "]"
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/internal/typeparams/notypeparams.go b/src/cmd/vendor/golang.org/x/tools/internal/typeparams/notypeparams.go
index 3a0abc7c18..e975e476f6 100644
--- a/src/cmd/vendor/golang.org/x/tools/internal/typeparams/notypeparams.go
+++ b/src/cmd/vendor/golang.org/x/tools/internal/typeparams/notypeparams.go
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//go:build !typeparams || !go1.17
-// +build !typeparams !go1.17
+//go:build !typeparams || !go1.18
+// +build !typeparams !go1.18
package typeparams
@@ -18,17 +18,25 @@ import (
// environment.
const Enabled = false
-// UnpackIndex extracts all index expressions from e. For non-generic code this
-// is always one expression: e.Index, but may be more than one expression for
-// generic type instantiation.
-func UnpackIndex(e *ast.IndexExpr) []ast.Expr {
- return []ast.Expr{e.Index}
-}
-
-// IsListExpr reports whether n is an *ast.ListExpr, which is a new node type
-// introduced to hold type arguments for generic type instantiation.
-func IsListExpr(n ast.Node) bool {
- return false
+// GetIndexExprData extracts data from AST nodes that represent index
+// expressions.
+//
+// For an ast.IndexExpr, the resulting IndexExprData will have exactly one
+// index expression. For an ast.MultiIndexExpr (go1.18+), it may have a
+// variable number of index expressions.
+//
+// For nodes that don't represent index expressions, GetIndexExprData returns
+// nil.
+func GetIndexExprData(n ast.Node) *IndexExprData {
+ if e, _ := n.(*ast.IndexExpr); e != nil {
+ return &IndexExprData{
+ X: e.X,
+ Lbrack: e.Lbrack,
+ Indices: []ast.Expr{e.Index},
+ Rbrack: e.Rbrack,
+ }
+ }
+ return nil
}
// ForTypeDecl extracts the (possibly nil) type parameter node list from n.
@@ -47,11 +55,6 @@ func ForSignature(*types.Signature) []*types.TypeName {
return nil
}
-// HasTypeSet reports if iface has a type set.
-func HasTypeSet(*types.Interface) bool {
- return false
-}
-
// IsComparable reports if iface is the comparable interface.
func IsComparable(*types.Interface) bool {
return false
diff --git a/src/cmd/vendor/golang.org/x/tools/internal/typeparams/typeparams.go b/src/cmd/vendor/golang.org/x/tools/internal/typeparams/typeparams.go
index 6b7958af06..be6b0525f6 100644
--- a/src/cmd/vendor/golang.org/x/tools/internal/typeparams/typeparams.go
+++ b/src/cmd/vendor/golang.org/x/tools/internal/typeparams/typeparams.go
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//go:build typeparams && go1.17
-// +build typeparams,go1.17
+//go:build typeparams && go1.18
+// +build typeparams,go1.18
package typeparams
@@ -18,26 +18,30 @@ import (
// environment.
const Enabled = true
-// UnpackIndex extracts all index expressions from e. For non-generic code this
-// is always one expression: e.Index, but may be more than one expression for
-// generic type instantiation.
-func UnpackIndex(e *ast.IndexExpr) []ast.Expr {
- if x, _ := e.Index.(*ast.ListExpr); x != nil {
- return x.ElemList
- }
- if e.Index != nil {
- return []ast.Expr{e.Index}
+// GetIndexExprData extracts data from AST nodes that represent index
+// expressions.
+//
+// For an ast.IndexExpr, the resulting IndexExprData will have exactly one
+// index expression. For an ast.MultiIndexExpr (go1.18+), it may have a
+// variable number of index expressions.
+//
+// For nodes that don't represent index expressions, GetIndexExprData returns
+// nil.
+func GetIndexExprData(n ast.Node) *IndexExprData {
+ switch e := n.(type) {
+ case *ast.IndexExpr:
+ return &IndexExprData{
+ X: e.X,
+ Lbrack: e.Lbrack,
+ Indices: []ast.Expr{e.Index},
+ Rbrack: e.Rbrack,
+ }
+ case *ast.MultiIndexExpr:
+ return (*IndexExprData)(e)
}
return nil
}
-// IsListExpr reports whether n is an *ast.ListExpr, which is a new node type
-// introduced to hold type arguments for generic type instantiation.
-func IsListExpr(n ast.Node) bool {
- _, ok := n.(*ast.ListExpr)
- return ok
-}
-
// ForTypeDecl extracts the (possibly nil) type parameter node list from n.
func ForTypeDecl(n *ast.TypeSpec) *ast.FieldList {
return n.TParams
@@ -54,12 +58,7 @@ func ForFuncDecl(n *ast.FuncDecl) *ast.FieldList {
// ForSignature extracts the (possibly empty) type parameter object list from
// sig.
func ForSignature(sig *types.Signature) []*types.TypeName {
- return sig.TParams()
-}
-
-// HasTypeSet reports if iface has a type set.
-func HasTypeSet(iface *types.Interface) bool {
- return iface.HasTypeList()
+ return tparamsSlice(sig.TParams())
}
// IsComparable reports if iface is the comparable interface.
@@ -76,7 +75,18 @@ func IsConstraint(iface *types.Interface) bool {
// ForNamed extracts the (possibly empty) type parameter object list from
// named.
func ForNamed(named *types.Named) []*types.TypeName {
- return named.TParams()
+ return tparamsSlice(named.TParams())
+}
+
+func tparamsSlice(tparams *types.TypeParams) []*types.TypeName {
+ if tparams.Len() == 0 {
+ return nil
+ }
+ result := make([]*types.TypeName, tparams.Len())
+ for i := 0; i < tparams.Len(); i++ {
+ result[i] = tparams.At(i)
+ }
+ return result
}
// NamedTArgs extracts the (possibly empty) type argument list from named.
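
GetIndexExprData replaces UnpackIndex and IsListExpr with a single accessor: callers pass any node and get back a uniform view of the operand, brackets and index list, whether the node is an ast.IndexExpr or (with typeparams on go1.18) an ast.MultiIndexExpr. A runnable sketch of the one-index case using only the standard library:

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
    )

    // indexParts mimics what GetIndexExprData yields for a plain
    // *ast.IndexExpr: the operand plus a one-element index list.
    func indexParts(n ast.Node) (ast.Expr, []ast.Expr) {
        if e, ok := n.(*ast.IndexExpr); ok {
            return e.X, []ast.Expr{e.Index}
        }
        return nil, nil
    }

    func main() {
        expr, err := parser.ParseExpr("m[key]")
        if err != nil {
            panic(err)
        }
        x, indices := indexParts(expr)
        fmt.Printf("X=%T indices=%d\n", x, len(indices)) // X=*ast.Ident indices=1
    }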
diff --git a/src/cmd/vendor/modules.txt b/src/cmd/vendor/modules.txt
index e032ccc2c7..eed06a9f50 100644
--- a/src/cmd/vendor/modules.txt
+++ b/src/cmd/vendor/modules.txt
@@ -48,7 +48,7 @@ golang.org/x/sys/windows
# golang.org/x/term v0.0.0-20210503060354-a79de5458b56
## explicit; go 1.17
golang.org/x/term
-# golang.org/x/tools v0.1.6-0.20210726171848-ebce39e5e3d6
+# golang.org/x/tools v0.1.6-0.20210809225032-337cebd2c151
## explicit; go 1.17
golang.org/x/tools/cover
golang.org/x/tools/go/analysis
diff --git a/src/cmd/vet/testdata/print/print.go b/src/cmd/vet/testdata/print/print.go
index be42a37717..46240e87bf 100644
--- a/src/cmd/vet/testdata/print/print.go
+++ b/src/cmd/vet/testdata/print/print.go
@@ -491,10 +491,10 @@ type recursiveStringer int
func (s recursiveStringer) String() string {
_ = fmt.Sprintf("%d", s)
_ = fmt.Sprintf("%#v", s)
- _ = fmt.Sprintf("%v", s) // ERROR "Sprintf format %v with arg s causes recursive \(cmd/vet/testdata/print\.recursiveStringer\)\.String method call"
- _ = fmt.Sprintf("%v", &s) // ERROR "Sprintf format %v with arg &s causes recursive \(cmd/vet/testdata/print\.recursiveStringer\)\.String method call"
+ _ = fmt.Sprintf("%v", s) // ERROR "Sprintf format %v with arg s causes recursive .*String method call"
+ _ = fmt.Sprintf("%v", &s) // ERROR "Sprintf format %v with arg &s causes recursive .*String method call"
_ = fmt.Sprintf("%T", s) // ok; does not recursively call String
- return fmt.Sprintln(s) // ERROR "Sprintln arg s causes recursive call to \(cmd/vet/testdata/print\.recursiveStringer\)\.String method"
+ return fmt.Sprintln(s) // ERROR "Sprintln arg s causes recursive call to .*String method"
}
type recursivePtrStringer int
@@ -502,7 +502,7 @@ type recursivePtrStringer int
func (p *recursivePtrStringer) String() string {
_ = fmt.Sprintf("%v", *p)
_ = fmt.Sprint(&p) // ok; prints address
- return fmt.Sprintln(p) // ERROR "Sprintln arg p causes recursive call to \(\*cmd/vet/testdata/print\.recursivePtrStringer\)\.String method"
+ return fmt.Sprintln(p) // ERROR "Sprintln arg p causes recursive call to .*String method"
}
type BoolFormatter bool