author     Keith Randall <khr@golang.org>   2021-05-23 12:38:59 -0700
committer  Keith Randall <khr@golang.org>   2021-05-24 19:06:05 +0000
commit     873401df5b202a751523b8cbd92bf3a8aaf989c8 (patch)
tree       001d8f6d64cde693933f495c6b414eeb64f538a4 /src/cmd/compile/internal/reflectdata/alg.go
parent     b83610699a4ea7da22a146c0eefe0ae4d5ac4610 (diff)
cmd/compile: ensure equal functions don't do unaligned loads
On architectures which don't support unaligned loads, make sure we don't
generate code that requires them.

Generated hash functions also matter in this respect, but they all look ok.

Update #37716
Fixes #46283

Change-Id: I6197fdfe04da4428092c99bd871d93738789e16b
Reviewed-on: https://go-review.googlesource.com/c/go/+/322151
Trust: Keith Randall <khr@golang.org>
Trust: Josh Bleecher Snyder <josharian@gmail.com>
Run-TryBot: Keith Randall <khr@golang.org>
Reviewed-by: Cherry Mui <cherryyz@google.com>
Reviewed-by: Josh Bleecher Snyder <josharian@gmail.com>
Reviewed-by: eric fang <eric.fang@arm.com>
TryBot-Result: Go Bot <gobot@golang.org>
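For context, a small Go sketch of the kind of type the fix is about. This is an
illustration based on the [2]int16 example mentioned in the first hunk below,
not the exact reproducer from issue 46283: the array field has width 4 but only
2-byte alignment, so comparing it with a single 32-bit load would be an
unaligned access on architectures that require natural alignment.

package main

import "fmt"

// Illustrative only: y has width 4 but alignment 2, and sits at offset 2
// inside T. Treating y as a single int32 during the generated equality
// check would mean a 4-byte load at a 2-byte-aligned address, which faults
// on architectures without unaligned load support.
type T struct {
	x int16
	y [2]int16 // offset 2, alignment 2, width 4
}

func main() {
	a := T{x: 1, y: [2]int16{2, 3}}
	b := T{x: 1, y: [2]int16{2, 3}}
	// The compiler-generated comparison must not assume y is 4-byte aligned.
	fmt.Println(a == b) // true
}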
Diffstat (limited to 'src/cmd/compile/internal/reflectdata/alg.go')
-rw-r--r--  src/cmd/compile/internal/reflectdata/alg.go  20
1 file changed, 20 insertions, 0 deletions
diff --git a/src/cmd/compile/internal/reflectdata/alg.go b/src/cmd/compile/internal/reflectdata/alg.go
index d12d9ca0a7..0707e0b61c 100644
--- a/src/cmd/compile/internal/reflectdata/alg.go
+++ b/src/cmd/compile/internal/reflectdata/alg.go
@@ -6,6 +6,7 @@ package reflectdata
 
 import (
 	"fmt"
+	"math/bits"
 	"sort"
 
 	"cmd/compile/internal/base"
@@ -47,6 +48,11 @@ func eqCanPanic(t *types.Type) bool {
 func AlgType(t *types.Type) types.AlgKind {
 	a, _ := types.AlgType(t)
 	if a == types.AMEM {
+		if t.Alignment() < int64(base.Ctxt.Arch.Alignment) && t.Alignment() < t.Width {
+			// For example, we can't treat [2]int16 as an int32 if int32s require
+			// 4-byte alignment. See issue 46283.
+			return a
+		}
 		switch t.Width {
 		case 0:
 			return types.AMEM0
@@ -769,6 +775,20 @@ func memrun(t *types.Type, start int) (size int64, next int) {
 		if f := t.Field(next); f.Sym.IsBlank() || !isRegularMemory(f.Type) {
 			break
 		}
+		// For issue 46283, don't combine fields if the resulting load would
+		// require a larger alignment than the component fields.
+		if base.Ctxt.Arch.Alignment > 1 {
+			align := t.Alignment()
+			if off := t.Field(start).Offset; off&(align-1) != 0 {
+				// Offset is less aligned than the containing type.
+				// Use offset to determine alignment.
+				align = 1 << uint(bits.TrailingZeros64(uint64(off)))
+			}
+			size := t.Field(next).End() - t.Field(start).Offset
+			if size > align {
+				break
+			}
+		}
 	}
 	return t.Field(next-1).End() - t.Field(start).Offset, next
 }
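As a standalone sketch of the alignment-from-offset trick used in the memrun
hunk above (the helper name and demo values are mine, not part of the CL): the
largest alignment that can be assumed for a value at byte offset off is the
largest power of two dividing off, which bits.TrailingZeros64 gives directly.

package main

import (
	"fmt"
	"math/bits"
)

// alignmentFromOffset is a hypothetical helper mirroring the computation in
// the memrun hunk: a field at byte offset off (off > 0) can only be assumed
// aligned to the largest power of two that divides off. The real code takes
// this branch only when off is less aligned than the containing type, and
// then compares the result against the size of the combined field run.
func alignmentFromOffset(off int64) int64 {
	return 1 << uint(bits.TrailingZeros64(uint64(off)))
}

func main() {
	for _, off := range []int64{2, 4, 6, 8, 12} {
		fmt.Printf("offset %2d -> assumed alignment %d\n", off, alignmentFromOffset(off))
	}
	// offset  2 -> assumed alignment 2
	// offset  4 -> assumed alignment 4
	// offset  6 -> assumed alignment 2
	// offset  8 -> assumed alignment 8
	// offset 12 -> assumed alignment 4
}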