path: root/src/runtime/export_test.go
author	Michael Anthony Knyszek <mknyszek@google.com>	2020-07-23 21:02:05 +0000
committer	Michael Knyszek <mknyszek@google.com>	2020-10-26 17:26:08 +0000
commit	42019613df2d9b6ad39e8ccf80861e75666025a0 (patch)
tree	0a88f289b24d9b8eb5dd04cedf38196fd3c7c70d /src/runtime/export_test.go
parent	ce46f197b6c75281b77ee93338e2559671e28b01 (diff)
download	go-42019613df2d9b6ad39e8ccf80861e75666025a0.tar.gz
	go-42019613df2d9b6ad39e8ccf80861e75666025a0.zip
runtime: make distributed/local malloc stats the source-of-truth
This change makes it so that various local malloc stats (excluding heap_scan and local_tinyallocs) are no longer written first to mheap fields but are instead accessed directly from each mcache. This change is part of a move toward having stats be distributed, and cleaning up some old code related to the stats.

Note that because there's no central source-of-truth, when an mcache dies, it must donate its stats to another mcache. It's always safe to donate to the mcache for the 0th P, so do that.

Change-Id: I2556093dbc27357cb9621c9b97671f3c00aa1173
Reviewed-on: https://go-review.googlesource.com/c/go/+/246964
Trust: Michael Knyszek <mknyszek@google.com>
Run-TryBot: Michael Knyszek <mknyszek@google.com>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Michael Pratt <mpratt@google.com>
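The donation step described in the message can be pictured with a small, self-contained Go sketch. The names below (localStats, donate, workers) are illustrative stand-ins, not the runtime's actual types or the code added by this commit; the point is only that per-worker counters are folded into worker 0's counters before a worker disappears, so no frees are lost.

package main

import "fmt"

// localStats models the per-mcache counters described in the commit
// message: each worker (P) accumulates its own free counts locally.
// These names are illustrative, not the runtime's.
type localStats struct {
	largeFree  uint64 // bytes freed by large frees
	nLargeFree uint64 // number of large frees
}

// donate folds c's counters into d so no events are lost when the
// owning worker goes away. This mirrors the "donate to the mcache for
// the 0th P" idea, but is not the runtime's actual implementation.
func (c *localStats) donate(d *localStats) {
	d.largeFree += c.largeFree
	d.nLargeFree += c.nLargeFree
	c.largeFree, c.nLargeFree = 0, 0
}

func main() {
	workers := []*localStats{{}, {}, {}}
	workers[2].largeFree, workers[2].nLargeFree = 4096, 2

	// Worker 2 is being destroyed: donate its stats to worker 0.
	workers[2].donate(workers[0])
	workers = workers[:2]

	// A slow, consistent read sums over all remaining workers,
	// much like the loop over allp in the diff below.
	var total localStats
	for _, w := range workers {
		total.largeFree += w.largeFree
		total.nLargeFree += w.nLargeFree
	}
	fmt.Printf("largeFree=%d nLargeFree=%d\n", total.largeFree, total.nLargeFree)
}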
Diffstat (limited to 'src/runtime/export_test.go')
-rw-r--r--	src/runtime/export_test.go	26
1 file changed, 18 insertions(+), 8 deletions(-)
diff --git a/src/runtime/export_test.go b/src/runtime/export_test.go
index e65b7b8ea7..d5a90ca65b 100644
--- a/src/runtime/export_test.go
+++ b/src/runtime/export_test.go
@@ -339,18 +339,28 @@ func ReadMemStatsSlow() (base, slow MemStats) {
// Add in frees. readmemstats_m flushed the cached stats, so
// these are up-to-date.
- var smallFree uint64
- slow.Frees = mheap_.nlargefree
- for i := range mheap_.nsmallfree {
- slow.Frees += mheap_.nsmallfree[i]
- bySize[i].Frees = mheap_.nsmallfree[i]
- bySize[i].Mallocs += mheap_.nsmallfree[i]
- smallFree += mheap_.nsmallfree[i] * uint64(class_to_size[i])
+ var largeFree, smallFree uint64
+ for _, p := range allp {
+ c := p.mcache
+ if c == nil {
+ continue
+ }
+ // Collect large allocation stats.
+ largeFree += uint64(c.local_largefree)
+ slow.Frees += uint64(c.local_nlargefree)
+
+ // Collect per-sizeclass stats.
+ for i := 0; i < _NumSizeClasses; i++ {
+ slow.Frees += uint64(c.local_nsmallfree[i])
+ bySize[i].Frees += uint64(c.local_nsmallfree[i])
+ bySize[i].Mallocs += uint64(c.local_nsmallfree[i])
+ smallFree += uint64(c.local_nsmallfree[i]) * uint64(class_to_size[i])
+ }
}
slow.Frees += memstats.tinyallocs
slow.Mallocs += slow.Frees
- slow.TotalAlloc = slow.Alloc + mheap_.largefree + smallFree
+ slow.TotalAlloc = slow.Alloc + largeFree + smallFree
for i := range slow.BySize {
slow.BySize[i].Mallocs = bySize[i].Mallocs
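The hunk above switches ReadMemStatsSlow from reading centralized mheap_ counters to summing the counters held by each P's mcache. A simplified, self-contained analog of that read-side aggregation (the types, class count, and names here are assumptions for illustration, not the runtime's) looks roughly like this:

package main

import "fmt"

const numSizeClasses = 4 // illustrative; the runtime uses _NumSizeClasses

// cacheStats stands in for the per-mcache counters the diff reads.
type cacheStats struct {
	nSmallFree [numSizeClasses]uint64 // frees per size class
	largeFree  uint64                 // bytes freed by large frees
	nLargeFree uint64                 // number of large frees
}

// aggregate mirrors the loop over allp in ReadMemStatsSlow: every
// cache's local counters are summed into one consistent snapshot.
func aggregate(caches []*cacheStats, classToSize [numSizeClasses]uint64) (frees, largeFree, smallFree uint64) {
	for _, c := range caches {
		if c == nil {
			continue // a P may not have a cache yet
		}
		largeFree += c.largeFree
		frees += c.nLargeFree
		for i := 0; i < numSizeClasses; i++ {
			frees += c.nSmallFree[i]
			smallFree += c.nSmallFree[i] * classToSize[i]
		}
	}
	return
}

func main() {
	classToSize := [numSizeClasses]uint64{8, 16, 32, 48}
	caches := []*cacheStats{
		{nSmallFree: [numSizeClasses]uint64{1, 2, 0, 3}, largeFree: 8192, nLargeFree: 1},
		nil, // a P without an mcache is skipped, as in the diff
		{nSmallFree: [numSizeClasses]uint64{0, 1, 1, 0}},
	}
	frees, largeFree, smallFree := aggregate(caches, classToSize)
	fmt.Println(frees, largeFree, smallFree)
}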