// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package interleaved implements the interleaved devirtualization and
// inlining pass.
package interleaved

import (
	"cmd/compile/internal/base"
	"cmd/compile/internal/devirtualize"
	"cmd/compile/internal/inline"
	"cmd/compile/internal/inline/inlheur"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/pgoir"
	"cmd/compile/internal/typecheck"
	"fmt"
)

// DevirtualizeAndInlinePackage interleaves devirtualization and inlining on
// all functions within pkg.
func DevirtualizeAndInlinePackage(pkg *ir.Package, profile *pgoir.Profile) {
	if profile != nil && base.Debug.PGODevirtualize > 0 {
		// TODO(mdempsky): Integrate into DevirtualizeAndInlineFunc below.
		ir.VisitFuncsBottomUp(typecheck.Target.Funcs, func(list []*ir.Func, recursive bool) {
			for _, fn := range list {
				devirtualize.ProfileGuided(fn, profile)
			}
		})
		ir.CurFunc = nil
	}

	if base.Flag.LowerL != 0 {
		inlheur.SetupScoreAdjustments()
	}

	var inlProfile *pgoir.Profile // copy of profile for inlining
	if base.Debug.PGOInline != 0 {
		inlProfile = profile
	}

	// First compute inlinability of all functions in the package.
	inline.CanInlineFuncs(pkg.Funcs, inlProfile)

	// Now we make a second pass to do devirtualization and inlining of
	// calls. Order here should not matter.
	for _, fn := range pkg.Funcs {
		DevirtualizeAndInlineFunc(fn, inlProfile)
	}

	if base.Flag.LowerL != 0 {
		// Perform a garbage collection of hidden closure functions that
		// are no longer reachable from top-level functions following
		// inlining. See #59404 and #59638 for more context.
		inline.GarbageCollectUnreferencedHiddenClosures()

		if base.Debug.DumpInlFuncProps != "" {
			inlheur.DumpFuncProps(nil, base.Debug.DumpInlFuncProps)
		}
		if inlheur.Enabled() {
			inline.PostProcessCallSites(inlProfile)
			inlheur.TearDown()
		}
	}
}

// DevirtualizeAndInlineFunc interleaves devirtualization and inlining
// on a single function.
func DevirtualizeAndInlineFunc(fn *ir.Func, profile *pgoir.Profile) {
	ir.WithFunc(fn, func() {
		if base.Flag.LowerL != 0 {
			if inlheur.Enabled() && !fn.Wrapper() {
				inlheur.ScoreCalls(fn)
				defer inlheur.ScoreCallsCleanup()
			}
			if base.Debug.DumpInlFuncProps != "" && !fn.Wrapper() {
				inlheur.DumpFuncProps(fn, base.Debug.DumpInlFuncProps)
			}
		}

		bigCaller := base.Flag.LowerL != 0 && inline.IsBigFunc(fn)
		if bigCaller && base.Flag.LowerM > 1 {
			fmt.Printf("%v: function %v considered 'big'; reducing max cost of inlinees\n", ir.Line(fn), fn)
		}

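		// match reports whether n is a call that this pass should
		// consider for devirtualization and inlining; tail calls are
		// only marked as non-inlinable, not matched for editing.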
		match := func(n ir.Node) bool {
			switch n := n.(type) {
			case *ir.CallExpr:
				return true
			case *ir.TailCallStmt:
				n.Call.NoInline = true // can't inline yet
			}
			return false
		}

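		// edit devirtualizes the candidate call if possible and then
		// attempts to inline it, returning the inlined call expression
		// or nil if the node is left unchanged.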
		edit := func(n ir.Node) ir.Node {
			call, ok := n.(*ir.CallExpr)
			if !ok { // previously inlined
				return nil
			}

			devirtualize.StaticCall(call)
			if inlCall := inline.TryInlineCall(fn, call, bigCaller, profile); inlCall != nil {
				return inlCall
			}
			return nil
		}

		fixpoint(fn, match, edit)
	})
}

// fixpoint repeatedly edits a function until it stabilizes.
//
// First, fixpoint applies match to every node n within fn. Then it
// iteratively applies edit to each node satisfying match(n).
//
// If edit(n) returns nil, no change is made. Otherwise, the result
// replaces n in fn's body, and fixpoint iterates at least once more.
//
// After an iteration where all edit calls return nil, fixpoint
// returns.
func fixpoint(fn *ir.Func, match func(ir.Node) bool, edit func(ir.Node) ir.Node) {
	// Consider the expression "f(g())". We want to be able to replace
	// "g()" in-place with its inlined representation. But if we first
	// replace "f(...)" with its inlined representation, then "g()" will
	// instead appear somewhere within this new AST.
	//
	// To mitigate this, each matched node n is wrapped in a ParenExpr,
	// so we can reliably replace n in-place by assigning ParenExpr.X.
	// It's safe to use ParenExpr here, because typecheck already
	// removed them all.
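	//
	// For example, "f(g())" is marked as "(f((g())))": even after the
	// outer call has been replaced by its inlined body, assigning to
	// the inner ParenExpr's X still replaces "g()" in-place.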

	var parens []*ir.ParenExpr
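	// mark wraps each node that satisfies match in a new ParenExpr and
	// records it in parens; children are marked first, so nested
	// matches are wrapped as well.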
	var mark func(ir.Node) ir.Node
	mark = func(n ir.Node) ir.Node {
		if _, ok := n.(*ir.ParenExpr); ok {
			return n // already visited n.X before wrapping
		}

		ok := match(n)

		ir.EditChildren(n, mark)

		if ok {
			paren := ir.NewParenExpr(n.Pos(), n)
			paren.SetType(n.Type())
			paren.SetTypecheck(n.Typecheck())

			parens = append(parens, paren)
			n = paren
		}

		return n
	}
	ir.EditChildren(fn, mark)

	// Edit until stable.
	for {
		done := true

		for i := 0; i < len(parens); i++ { // can't use "range parens" here
			paren := parens[i]
			if new := edit(paren.X); new != nil {
				// Update AST and recursively mark nodes.
				paren.X = new
				ir.EditChildren(new, mark) // mark may append to parens
				done = false
			}
		}

		if done {
			break
		}
	}

	// Finally, remove any parens we inserted.
	if len(parens) == 0 {
		return // short circuit
	}
	var unparen func(ir.Node) ir.Node
	unparen = func(n ir.Node) ir.Node {
		if paren, ok := n.(*ir.ParenExpr); ok {
			n = paren.X
		}
		ir.EditChildren(n, unparen)
		return n
	}
	ir.EditChildren(fn, unparen)
}
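
// Usage sketch: the compiler front end is expected to call
// DevirtualizeAndInlinePackage once per package after typechecking,
// roughly
//
//	interleaved.DevirtualizeAndInlinePackage(typecheck.Target, profile)
//
// where profile is the PGO profile loaded via -pgoprofile, or nil when
// none was supplied. The exact call site is assumed here, not shown in
// this file.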