path: root/src/cmd/compile/internal/ssa/rewriteAMD64splitload.go
author	Alberto Donizetti <alb.donizetti@gmail.com>	2021-03-05 11:22:13 +0100
committer	Alberto Donizetti <alb.donizetti@gmail.com>	2021-03-09 08:19:14 +0000
commit	b70a2bc9c612de35b765712bd689865f6a1716b6 (patch)
tree	7e538fba518ca7a8a706601214c5e700d7979979 /src/cmd/compile/internal/ssa/rewriteAMD64splitload.go
parent	437d229e2ac4cda4265090375b94d74ca218a846 (diff)
cmd/compile: make ValAndOff.{Val,Off} return an int32
The ValAndOff type is a 64bit integer holding a 32bit value and a 32bit offset in each half, but for historical reasons its Val and Off methods returned an int64. This was convenient when AuxInt was always an int64, but now that AuxInts are typed we can return int32 from Val and Off and get rid of several casts and now-unnecessary range checks.

This change:

- changes the Val and Off methods to return an int32 (from int64);
- adds Val64 and Off64 methods for convenience in the few remaining places (in the ssa.go files) where Val and Off are stored in int64 fields;
- deletes makeValAndOff64 and renames makeValAndOff32 to makeValAndOff;
- deletes a few ValAndOff methods that are now unused;
- removes several validOff/validValAndOff checks that will always return true.

Passes:

  GOARCH=amd64 gotip build -toolexec 'toolstash -cmp' -a std
  GOARCH=386 gotip build -toolexec 'toolstash -cmp' -a std
  GOARCH=s390x gotip build -toolexec 'toolstash -cmp' -a std

(the three GOARCHs with SSA rules files impacted by the change).

Change-Id: I2abbbf42188c798631b94d3a55ca44256f140be7
Reviewed-on: https://go-review.googlesource.com/c/go/+/299149
Trust: Alberto Donizetti <alb.donizetti@gmail.com>
Trust: Keith Randall <khr@golang.org>
Run-TryBot: Alberto Donizetti <alb.donizetti@gmail.com>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Keith Randall <khr@golang.org>
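For orientation, here is a minimal sketch of the packing scheme the commit message describes: a 64-bit AuxInt whose halves hold a 32-bit value and a 32-bit offset, with Val/Off returning int32 and Val64/Off64 as convenience wrappers. The method names follow the commit message; the exact field layout and helper shown below are assumptions for illustration, not a copy of the cmd/compile/internal/ssa source.

```go
// Hedged sketch (not the real compiler source): ValAndOff packs a
// 32-bit value and a 32-bit offset into one 64-bit AuxInt, here with
// the value in the upper half and the offset in the lower half.
package main

import "fmt"

type ValAndOff int64

// Val returns the 32-bit value stored in the upper half.
func (x ValAndOff) Val() int32 { return int32(int64(x) >> 32) }

// Off returns the 32-bit offset stored in the lower half.
func (x ValAndOff) Off() int32 { return int32(int64(x)) }

// Val64 and Off64 are convenience wrappers for the few remaining
// callers that keep these quantities in int64 fields.
func (x ValAndOff) Val64() int64 { return int64(x.Val()) }
func (x ValAndOff) Off64() int64 { return int64(x.Off()) }

// makeValAndOff packs a value and an offset; in the commit it replaces
// makeValAndOff32 (makeValAndOff64 is deleted).
func makeValAndOff(val, off int32) ValAndOff {
	return ValAndOff(int64(val)<<32 | int64(uint32(off)))
}

func main() {
	vo := makeValAndOff(7, -16)
	fmt.Println(vo.Val(), vo.Off()) // 7 -16
}
```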
Diffstat (limited to 'src/cmd/compile/internal/ssa/rewriteAMD64splitload.go')
-rw-r--r--	src/cmd/compile/internal/ssa/rewriteAMD64splitload.go	100
1 file changed, 50 insertions(+), 50 deletions(-)
diff --git a/src/cmd/compile/internal/ssa/rewriteAMD64splitload.go b/src/cmd/compile/internal/ssa/rewriteAMD64splitload.go
index 65bfec0f68..1b8680c052 100644
--- a/src/cmd/compile/internal/ssa/rewriteAMD64splitload.go
+++ b/src/cmd/compile/internal/ssa/rewriteAMD64splitload.go
@@ -59,7 +59,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPBconstload(v *Value) bool {
typ := &b.Func.Config.Types
// match: (CMPBconstload {sym} [vo] ptr mem)
// cond: vo.Val() == 0
- // result: (TESTB x:(MOVBload {sym} [vo.Off32()] ptr mem) x)
+ // result: (TESTB x:(MOVBload {sym} [vo.Off()] ptr mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -70,7 +70,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPBconstload(v *Value) bool {
}
v.reset(OpAMD64TESTB)
x := b.NewValue0(v.Pos, OpAMD64MOVBload, typ.UInt8)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg2(ptr, mem)
v.AddArg2(x, x)
@@ -78,7 +78,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPBconstload(v *Value) bool {
}
// match: (CMPBconstload {sym} [vo] ptr mem)
// cond: vo.Val() != 0
- // result: (CMPBconst (MOVBload {sym} [vo.Off32()] ptr mem) [vo.Val8()])
+ // result: (CMPBconst (MOVBload {sym} [vo.Off()] ptr mem) [vo.Val8()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -90,7 +90,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPBconstload(v *Value) bool {
v.reset(OpAMD64CMPBconst)
v.AuxInt = int8ToAuxInt(vo.Val8())
v0 := b.NewValue0(v.Pos, OpAMD64MOVBload, typ.UInt8)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
@@ -106,7 +106,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPBconstloadidx1(v *Value) bool {
typ := &b.Func.Config.Types
// match: (CMPBconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTB x:(MOVBloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTB x:(MOVBloadidx1 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -118,7 +118,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPBconstloadidx1(v *Value) bool {
}
v.reset(OpAMD64TESTB)
x := b.NewValue0(v.Pos, OpAMD64MOVBloadidx1, typ.UInt8)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
@@ -126,7 +126,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPBconstloadidx1(v *Value) bool {
}
// match: (CMPBconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPBconst (MOVBloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val8()])
+ // result: (CMPBconst (MOVBloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val8()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -139,7 +139,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPBconstloadidx1(v *Value) bool {
v.reset(OpAMD64CMPBconst)
v.AuxInt = int8ToAuxInt(vo.Val8())
v0 := b.NewValue0(v.Pos, OpAMD64MOVBloadidx1, typ.UInt8)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
@@ -202,7 +202,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstload(v *Value) bool {
typ := &b.Func.Config.Types
// match: (CMPLconstload {sym} [vo] ptr mem)
// cond: vo.Val() == 0
- // result: (TESTL x:(MOVLload {sym} [vo.Off32()] ptr mem) x)
+ // result: (TESTL x:(MOVLload {sym} [vo.Off()] ptr mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -213,7 +213,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstload(v *Value) bool {
}
v.reset(OpAMD64TESTL)
x := b.NewValue0(v.Pos, OpAMD64MOVLload, typ.UInt32)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg2(ptr, mem)
v.AddArg2(x, x)
@@ -221,7 +221,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstload(v *Value) bool {
}
// match: (CMPLconstload {sym} [vo] ptr mem)
// cond: vo.Val() != 0
- // result: (CMPLconst (MOVLload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
+ // result: (CMPLconst (MOVLload {sym} [vo.Off()] ptr mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -231,9 +231,9 @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstload(v *Value) bool {
break
}
v.reset(OpAMD64CMPLconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, OpAMD64MOVLload, typ.UInt32)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
@@ -249,7 +249,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx1(v *Value) bool {
typ := &b.Func.Config.Types
// match: (CMPLconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTL x:(MOVLloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTL x:(MOVLloadidx1 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -261,7 +261,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx1(v *Value) bool {
}
v.reset(OpAMD64TESTL)
x := b.NewValue0(v.Pos, OpAMD64MOVLloadidx1, typ.UInt32)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
@@ -269,7 +269,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx1(v *Value) bool {
}
// match: (CMPLconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPLconst (MOVLloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
+ // result: (CMPLconst (MOVLloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -280,9 +280,9 @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx1(v *Value) bool {
break
}
v.reset(OpAMD64CMPLconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx1, typ.UInt32)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
@@ -298,7 +298,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx4(v *Value) bool {
typ := &b.Func.Config.Types
// match: (CMPLconstloadidx4 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTL x:(MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTL x:(MOVLloadidx4 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -310,7 +310,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx4(v *Value) bool {
}
v.reset(OpAMD64TESTL)
x := b.NewValue0(v.Pos, OpAMD64MOVLloadidx4, typ.UInt32)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
@@ -318,7 +318,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx4(v *Value) bool {
}
// match: (CMPLconstloadidx4 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPLconst (MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
+ // result: (CMPLconst (MOVLloadidx4 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -329,9 +329,9 @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx4(v *Value) bool {
break
}
v.reset(OpAMD64CMPLconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx4, typ.UInt32)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
@@ -419,7 +419,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstload(v *Value) bool {
typ := &b.Func.Config.Types
// match: (CMPQconstload {sym} [vo] ptr mem)
// cond: vo.Val() == 0
- // result: (TESTQ x:(MOVQload {sym} [vo.Off32()] ptr mem) x)
+ // result: (TESTQ x:(MOVQload {sym} [vo.Off()] ptr mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -430,7 +430,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstload(v *Value) bool {
}
v.reset(OpAMD64TESTQ)
x := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg2(ptr, mem)
v.AddArg2(x, x)
@@ -438,7 +438,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstload(v *Value) bool {
}
// match: (CMPQconstload {sym} [vo] ptr mem)
// cond: vo.Val() != 0
- // result: (CMPQconst (MOVQload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
+ // result: (CMPQconst (MOVQload {sym} [vo.Off()] ptr mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -448,9 +448,9 @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstload(v *Value) bool {
break
}
v.reset(OpAMD64CMPQconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
@@ -466,7 +466,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx1(v *Value) bool {
typ := &b.Func.Config.Types
// match: (CMPQconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTQ x:(MOVQloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTQ x:(MOVQloadidx1 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -478,7 +478,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx1(v *Value) bool {
}
v.reset(OpAMD64TESTQ)
x := b.NewValue0(v.Pos, OpAMD64MOVQloadidx1, typ.UInt64)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
@@ -486,7 +486,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx1(v *Value) bool {
}
// match: (CMPQconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPQconst (MOVQloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
+ // result: (CMPQconst (MOVQloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -497,9 +497,9 @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx1(v *Value) bool {
break
}
v.reset(OpAMD64CMPQconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx1, typ.UInt64)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
@@ -515,7 +515,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx8(v *Value) bool {
typ := &b.Func.Config.Types
// match: (CMPQconstloadidx8 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTQ x:(MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTQ x:(MOVQloadidx8 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -527,7 +527,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx8(v *Value) bool {
}
v.reset(OpAMD64TESTQ)
x := b.NewValue0(v.Pos, OpAMD64MOVQloadidx8, typ.UInt64)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
@@ -535,7 +535,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx8(v *Value) bool {
}
// match: (CMPQconstloadidx8 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPQconst (MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
+ // result: (CMPQconst (MOVQloadidx8 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -546,9 +546,9 @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx8(v *Value) bool {
break
}
v.reset(OpAMD64CMPQconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx8, typ.UInt64)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
@@ -636,7 +636,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstload(v *Value) bool {
typ := &b.Func.Config.Types
// match: (CMPWconstload {sym} [vo] ptr mem)
// cond: vo.Val() == 0
- // result: (TESTW x:(MOVWload {sym} [vo.Off32()] ptr mem) x)
+ // result: (TESTW x:(MOVWload {sym} [vo.Off()] ptr mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -647,7 +647,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstload(v *Value) bool {
}
v.reset(OpAMD64TESTW)
x := b.NewValue0(v.Pos, OpAMD64MOVWload, typ.UInt16)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg2(ptr, mem)
v.AddArg2(x, x)
@@ -655,7 +655,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstload(v *Value) bool {
}
// match: (CMPWconstload {sym} [vo] ptr mem)
// cond: vo.Val() != 0
- // result: (CMPWconst (MOVWload {sym} [vo.Off32()] ptr mem) [vo.Val16()])
+ // result: (CMPWconst (MOVWload {sym} [vo.Off()] ptr mem) [vo.Val16()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -667,7 +667,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstload(v *Value) bool {
v.reset(OpAMD64CMPWconst)
v.AuxInt = int16ToAuxInt(vo.Val16())
v0 := b.NewValue0(v.Pos, OpAMD64MOVWload, typ.UInt16)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
@@ -683,7 +683,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx1(v *Value) bool {
typ := &b.Func.Config.Types
// match: (CMPWconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTW x:(MOVWloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTW x:(MOVWloadidx1 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -695,7 +695,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx1(v *Value) bool {
}
v.reset(OpAMD64TESTW)
x := b.NewValue0(v.Pos, OpAMD64MOVWloadidx1, typ.UInt16)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
@@ -703,7 +703,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx1(v *Value) bool {
}
// match: (CMPWconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPWconst (MOVWloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
+ // result: (CMPWconst (MOVWloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val16()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -716,7 +716,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx1(v *Value) bool {
v.reset(OpAMD64CMPWconst)
v.AuxInt = int16ToAuxInt(vo.Val16())
v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx1, typ.UInt16)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
@@ -732,7 +732,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx2(v *Value) bool {
typ := &b.Func.Config.Types
// match: (CMPWconstloadidx2 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTW x:(MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTW x:(MOVWloadidx2 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -744,7 +744,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx2(v *Value) bool {
}
v.reset(OpAMD64TESTW)
x := b.NewValue0(v.Pos, OpAMD64MOVWloadidx2, typ.UInt16)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
@@ -752,7 +752,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx2(v *Value) bool {
}
// match: (CMPWconstloadidx2 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPWconst (MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
+ // result: (CMPWconst (MOVWloadidx2 {sym} [vo.Off()] ptr idx mem) [vo.Val16()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
@@ -765,7 +765,7 @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx2(v *Value) bool {
v.reset(OpAMD64CMPWconst)
v.AuxInt = int16ToAuxInt(vo.Val16())
v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx2, typ.UInt16)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)