path: root/src/cmd/asm
author	eric fang <eric.fang@arm.com>	2021-04-21 03:12:20 +0000
committer	eric fang <eric.fang@arm.com>	2021-04-28 01:19:40 +0000
commit	9726c78539f4945087c837201c1ec3545a318389 (patch)
tree	377e22c003d5b6e7d6ca6ffb6bc3458bb366ac09 /src/cmd/asm
parent	f439a762533f3a75eb928b67d0415010aa8a81d7 (diff)
download	go-9726c78539f4945087c837201c1ec3545a318389.tar.gz
	go-9726c78539f4945087c837201c1ec3545a318389.zip
cmd/asm: add check for register and shift/extension combination on arm64
The current code lacks a check on whether the register and shift/extension combination is valid; for example, the following instructions also compile:

	ADD	F1<<1, R1, R3
	ADD	V1<<1, R1, R3
	MOVW	(R9)(F8.SXTW<<2), R19
	VST1	R4.D[1], (R0)

In fact, only general registers can perform shift operations, and element and arrangement extensions are applicable only to vector registers. This CL adds a check for the register and shift/extension combination on arm64.

Change-Id: I93dd9343e92a66899cba8eaf4e0ac5430e94692b
Reviewed-on: https://go-review.googlesource.com/c/go/+/312571
Trust: eric fang <eric.fang@arm.com>
Reviewed-by: eric fang <eric.fang@arm.com>
Reviewed-by: Keith Randall <khr@golang.org>
Run-TryBot: eric fang <eric.fang@arm.com>
TryBot-Result: Go Bot <gobot@golang.org>
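As a rough illustration of the new check (a standalone sketch with placeholder register constants, not the actual cmd/asm code), the shifted-register operand keeps its packed encoding, but the base register must now fall within the general-register range:

package main

import (
	"errors"
	"fmt"
)

// Placeholder register numbering; the real constants live in cmd/internal/obj/arm64.
const (
	regR0  int16 = 0
	regR31 int16 = 31
)

// encodeShift mirrors the shape of ARM64RegisterShift in the diff below:
// validate the base register, then pack register, shift op and shift amount.
func encodeShift(reg, op, count int16) (int64, error) {
	if reg < regR0 || reg > regR31 {
		return 0, errors.New("invalid register for shift operation")
	}
	return int64(reg&31)<<16 | int64(op)<<22 | int64(uint16(count)), nil
}

func main() {
	fmt.Println(encodeShift(1, 0, 1))  // in the general-register range: encodes fine
	fmt.Println(encodeShift(40, 0, 1)) // outside the range in this sketch: rejected
}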
Diffstat (limited to 'src/cmd/asm')
-rw-r--r--	src/cmd/asm/internal/arch/arm64.go	306
-rw-r--r--	src/cmd/asm/internal/asm/parse.go	6
-rw-r--r--	src/cmd/asm/internal/asm/testdata/arm64error.s	14
3 files changed, 174 insertions, 152 deletions
diff --git a/src/cmd/asm/internal/arch/arm64.go b/src/cmd/asm/internal/arch/arm64.go
index e557630ca6..40d828a1fe 100644
--- a/src/cmd/asm/internal/arch/arm64.go
+++ b/src/cmd/asm/internal/arch/arm64.go
@@ -147,7 +147,16 @@ func arm64RegisterNumber(name string, n int16) (int16, bool) {
return 0, false
}
-// ARM64RegisterExtension parses an ARM64 register with extension or arrangement.
+// ARM64RegisterShift constructs an ARM64 register with shift operation.
+func ARM64RegisterShift(reg, op, count int16) (int64, error) {
+ // the base register of shift operations must be general register.
+ if reg > arm64.REG_R31 || reg < arm64.REG_R0 {
+ return 0, errors.New("invalid register for shift operation")
+ }
+ return int64(reg&31)<<16 | int64(op)<<22 | int64(uint16(count)), nil
+}
+
+// ARM64RegisterExtension constructs an ARM64 register with extension or arrangement.
func ARM64RegisterExtension(a *obj.Addr, ext string, reg, num int16, isAmount, isIndex bool) error {
Rnum := (reg & 31) + int16(num<<5)
if isAmount {
@@ -155,154 +164,163 @@ func ARM64RegisterExtension(a *obj.Addr, ext string, reg, num int16, isAmount, i
return errors.New("index shift amount is out of range")
}
}
- switch ext {
- case "UXTB":
- if !isAmount {
- return errors.New("invalid register extension")
- }
- if a.Type == obj.TYPE_MEM {
- return errors.New("invalid shift for the register offset addressing mode")
- }
- a.Reg = arm64.REG_UXTB + Rnum
- case "UXTH":
- if !isAmount {
- return errors.New("invalid register extension")
- }
- if a.Type == obj.TYPE_MEM {
- return errors.New("invalid shift for the register offset addressing mode")
- }
- a.Reg = arm64.REG_UXTH + Rnum
- case "UXTW":
- if !isAmount {
- return errors.New("invalid register extension")
- }
- // effective address of memory is a base register value and an offset register value.
- if a.Type == obj.TYPE_MEM {
- a.Index = arm64.REG_UXTW + Rnum
- } else {
- a.Reg = arm64.REG_UXTW + Rnum
- }
- case "UXTX":
- if !isAmount {
- return errors.New("invalid register extension")
- }
- if a.Type == obj.TYPE_MEM {
- return errors.New("invalid shift for the register offset addressing mode")
- }
- a.Reg = arm64.REG_UXTX + Rnum
- case "SXTB":
- if !isAmount {
- return errors.New("invalid register extension")
- }
- a.Reg = arm64.REG_SXTB + Rnum
- case "SXTH":
- if !isAmount {
- return errors.New("invalid register extension")
- }
- if a.Type == obj.TYPE_MEM {
- return errors.New("invalid shift for the register offset addressing mode")
- }
- a.Reg = arm64.REG_SXTH + Rnum
- case "SXTW":
- if !isAmount {
- return errors.New("invalid register extension")
- }
- if a.Type == obj.TYPE_MEM {
- a.Index = arm64.REG_SXTW + Rnum
- } else {
- a.Reg = arm64.REG_SXTW + Rnum
- }
- case "SXTX":
- if !isAmount {
- return errors.New("invalid register extension")
- }
- if a.Type == obj.TYPE_MEM {
- a.Index = arm64.REG_SXTX + Rnum
- } else {
- a.Reg = arm64.REG_SXTX + Rnum
- }
- case "LSL":
- if !isAmount {
- return errors.New("invalid register extension")
- }
- a.Index = arm64.REG_LSL + Rnum
- case "B8":
- if isIndex {
- return errors.New("invalid register extension")
- }
- a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8B & 15) << 5)
- case "B16":
- if isIndex {
- return errors.New("invalid register extension")
- }
- a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_16B & 15) << 5)
- case "H4":
- if isIndex {
- return errors.New("invalid register extension")
- }
- a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4H & 15) << 5)
- case "H8":
- if isIndex {
- return errors.New("invalid register extension")
- }
- a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8H & 15) << 5)
- case "S2":
- if isIndex {
- return errors.New("invalid register extension")
- }
- a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2S & 15) << 5)
- case "S4":
- if isIndex {
- return errors.New("invalid register extension")
- }
- a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4S & 15) << 5)
- case "D1":
- if isIndex {
- return errors.New("invalid register extension")
- }
- a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1D & 15) << 5)
- case "D2":
- if isIndex {
- return errors.New("invalid register extension")
- }
- a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2D & 15) << 5)
- case "Q1":
- if isIndex {
- return errors.New("invalid register extension")
- }
- a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1Q & 15) << 5)
- case "B":
- if !isIndex {
- return nil
- }
- a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_B & 15) << 5)
- a.Index = num
- case "H":
- if !isIndex {
- return nil
- }
- a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_H & 15) << 5)
- a.Index = num
- case "S":
- if !isIndex {
- return nil
- }
- a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_S & 15) << 5)
- a.Index = num
- case "D":
- if !isIndex {
- return nil
+ if reg <= arm64.REG_R31 && reg >= arm64.REG_R0 {
+ switch ext {
+ case "UXTB":
+ if !isAmount {
+ return errors.New("invalid register extension")
+ }
+ if a.Type == obj.TYPE_MEM {
+ return errors.New("invalid shift for the register offset addressing mode")
+ }
+ a.Reg = arm64.REG_UXTB + Rnum
+ case "UXTH":
+ if !isAmount {
+ return errors.New("invalid register extension")
+ }
+ if a.Type == obj.TYPE_MEM {
+ return errors.New("invalid shift for the register offset addressing mode")
+ }
+ a.Reg = arm64.REG_UXTH + Rnum
+ case "UXTW":
+ if !isAmount {
+ return errors.New("invalid register extension")
+ }
+ // effective address of memory is a base register value and an offset register value.
+ if a.Type == obj.TYPE_MEM {
+ a.Index = arm64.REG_UXTW + Rnum
+ } else {
+ a.Reg = arm64.REG_UXTW + Rnum
+ }
+ case "UXTX":
+ if !isAmount {
+ return errors.New("invalid register extension")
+ }
+ if a.Type == obj.TYPE_MEM {
+ return errors.New("invalid shift for the register offset addressing mode")
+ }
+ a.Reg = arm64.REG_UXTX + Rnum
+ case "SXTB":
+ if !isAmount {
+ return errors.New("invalid register extension")
+ }
+ a.Reg = arm64.REG_SXTB + Rnum
+ case "SXTH":
+ if !isAmount {
+ return errors.New("invalid register extension")
+ }
+ if a.Type == obj.TYPE_MEM {
+ return errors.New("invalid shift for the register offset addressing mode")
+ }
+ a.Reg = arm64.REG_SXTH + Rnum
+ case "SXTW":
+ if !isAmount {
+ return errors.New("invalid register extension")
+ }
+ if a.Type == obj.TYPE_MEM {
+ a.Index = arm64.REG_SXTW + Rnum
+ } else {
+ a.Reg = arm64.REG_SXTW + Rnum
+ }
+ case "SXTX":
+ if !isAmount {
+ return errors.New("invalid register extension")
+ }
+ if a.Type == obj.TYPE_MEM {
+ a.Index = arm64.REG_SXTX + Rnum
+ } else {
+ a.Reg = arm64.REG_SXTX + Rnum
+ }
+ case "LSL":
+ if !isAmount {
+ return errors.New("invalid register extension")
+ }
+ a.Index = arm64.REG_LSL + Rnum
+ default:
+ return errors.New("unsupported general register extension type: " + ext)
+
}
- a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_D & 15) << 5)
- a.Index = num
- default:
- return errors.New("unsupported register extension type: " + ext)
+ } else if reg <= arm64.REG_V31 && reg >= arm64.REG_V0 {
+ switch ext {
+ case "B8":
+ if isIndex {
+ return errors.New("invalid register extension")
+ }
+ a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8B & 15) << 5)
+ case "B16":
+ if isIndex {
+ return errors.New("invalid register extension")
+ }
+ a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_16B & 15) << 5)
+ case "H4":
+ if isIndex {
+ return errors.New("invalid register extension")
+ }
+ a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4H & 15) << 5)
+ case "H8":
+ if isIndex {
+ return errors.New("invalid register extension")
+ }
+ a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8H & 15) << 5)
+ case "S2":
+ if isIndex {
+ return errors.New("invalid register extension")
+ }
+ a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2S & 15) << 5)
+ case "S4":
+ if isIndex {
+ return errors.New("invalid register extension")
+ }
+ a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4S & 15) << 5)
+ case "D1":
+ if isIndex {
+ return errors.New("invalid register extension")
+ }
+ a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1D & 15) << 5)
+ case "D2":
+ if isIndex {
+ return errors.New("invalid register extension")
+ }
+ a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2D & 15) << 5)
+ case "Q1":
+ if isIndex {
+ return errors.New("invalid register extension")
+ }
+ a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1Q & 15) << 5)
+ case "B":
+ if !isIndex {
+ return nil
+ }
+ a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_B & 15) << 5)
+ a.Index = num
+ case "H":
+ if !isIndex {
+ return nil
+ }
+ a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_H & 15) << 5)
+ a.Index = num
+ case "S":
+ if !isIndex {
+ return nil
+ }
+ a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_S & 15) << 5)
+ a.Index = num
+ case "D":
+ if !isIndex {
+ return nil
+ }
+ a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_D & 15) << 5)
+ a.Index = num
+ default:
+ return errors.New("unsupported simd register extension type: " + ext)
+ }
+ } else {
+ return errors.New("invalid register and extension combination")
}
-
return nil
}
-// ARM64RegisterArrangement parses an ARM64 vector register arrangement.
+// ARM64RegisterArrangement constructs an ARM64 vector register arrangement.
func ARM64RegisterArrangement(reg int16, name, arng string) (int64, error) {
var curQ, curSize uint16
if name[0] != 'V' {
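A hedged reading of the restructuring above: the extension cases are now split by register class, so shift/extend names (UXTB through LSL) are accepted only for general registers, and arrangement/element names (B8 through Q1, B/H/S/D) only for vector registers; anything else is rejected outright. A simplified, self-contained sketch of that dispatch, using placeholder ranges rather than the real arm64.REG_* constants:

package main

import (
	"errors"
	"fmt"
)

// Placeholder ranges standing in for arm64.REG_R0..REG_R31 and REG_V0..REG_V31.
const (
	regR0, regR31 int16 = 0, 31
	regV0, regV31 int16 = 64, 95
)

var generalExt = map[string]bool{
	"UXTB": true, "UXTH": true, "UXTW": true, "UXTX": true,
	"SXTB": true, "SXTH": true, "SXTW": true, "SXTX": true, "LSL": true,
}

var vectorExt = map[string]bool{
	"B8": true, "B16": true, "H4": true, "H8": true, "S2": true, "S4": true,
	"D1": true, "D2": true, "Q1": true, "B": true, "H": true, "S": true, "D": true,
}

// checkExtension keeps only the class dispatch from the diff; the real code also
// validates the addressing mode, shift amount and element index per extension.
func checkExtension(reg int16, ext string) error {
	switch {
	case reg >= regR0 && reg <= regR31:
		if !generalExt[ext] {
			return errors.New("unsupported general register extension type: " + ext)
		}
	case reg >= regV0 && reg <= regV31:
		if !vectorExt[ext] {
			return errors.New("unsupported simd register extension type: " + ext)
		}
	default:
		return errors.New("invalid register and extension combination")
	}
	return nil
}

func main() {
	fmt.Println(checkExtension(regR0+9, "SXTW")) // general register + extension: ok
	fmt.Println(checkExtension(regV0+4, "D"))    // vector register + element form: ok
	fmt.Println(checkExtension(regR0+4, "D"))    // general register + element form: rejected
}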
diff --git a/src/cmd/asm/internal/asm/parse.go b/src/cmd/asm/internal/asm/parse.go
index 2c7332877f..ab48632a44 100644
--- a/src/cmd/asm/internal/asm/parse.go
+++ b/src/cmd/asm/internal/asm/parse.go
@@ -689,7 +689,11 @@ func (p *Parser) registerShift(name string, prefix rune) int64 {
p.errorf("unexpected %s in register shift", tok.String())
}
if p.arch.Family == sys.ARM64 {
- return int64(r1&31)<<16 | int64(op)<<22 | int64(uint16(count))
+ off, err := arch.ARM64RegisterShift(r1, op, count)
+ if err != nil {
+ p.errorf(err.Error())
+ }
+ return off
} else {
return int64((r1 & 15) | op<<5 | count)
}
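The parse.go change above is plumbing: registerShift previously computed the packed operand inline and could not fail; it now delegates to the checked constructor and reports any error through the parser. A hedged sketch of that error-propagation pattern with simplified names (the real parser passes arch.ARM64RegisterShift's error to p.errorf):

package main

import (
	"errors"
	"fmt"
)

// encodeShift stands in for arch.ARM64RegisterShift from the arm64.go diff above.
func encodeShift(reg, op, count int16) (int64, error) {
	if reg < 0 || reg > 31 { // placeholder bounds for R0..R31
		return 0, errors.New("invalid register for shift operation")
	}
	return int64(reg&31)<<16 | int64(op)<<22 | int64(uint16(count)), nil
}

// registerShift sketches the new control flow: report the error through an
// errorf-style callback instead of silently encoding an invalid operand.
func registerShift(reg, op, count int16, errorf func(string, ...interface{})) int64 {
	off, err := encodeShift(reg, op, count)
	if err != nil {
		errorf("%v", err)
	}
	return off
}

func main() {
	report := func(format string, args ...interface{}) { fmt.Printf(format+"\n", args...) }
	fmt.Println(registerShift(1, 0, 1, report))  // valid: prints the packed operand
	fmt.Println(registerShift(99, 0, 1, report)) // invalid: the error is reported, 0 is returned
}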
diff --git a/src/cmd/asm/internal/asm/testdata/arm64error.s b/src/cmd/asm/internal/asm/testdata/arm64error.s
index feb03abacd..66fc910759 100644
--- a/src/cmd/asm/internal/asm/testdata/arm64error.s
+++ b/src/cmd/asm/internal/asm/testdata/arm64error.s
@@ -96,13 +96,13 @@ TEXT errors(SB),$0
VMOV V8.H[9], R3 // ERROR "register element index out of range 0 to 7"
VMOV V8.S[4], R3 // ERROR "register element index out of range 0 to 3"
VMOV V8.D[2], R3 // ERROR "register element index out of range 0 to 1"
- VDUP V8.B[16], R3.B16 // ERROR "register element index out of range 0 to 15"
- VDUP V8.B[17], R3.B8 // ERROR "register element index out of range 0 to 15"
- VDUP V8.H[9], R3.H4 // ERROR "register element index out of range 0 to 7"
- VDUP V8.H[9], R3.H8 // ERROR "register element index out of range 0 to 7"
- VDUP V8.S[4], R3.S2 // ERROR "register element index out of range 0 to 3"
- VDUP V8.S[4], R3.S4 // ERROR "register element index out of range 0 to 3"
- VDUP V8.D[2], R3.D2 // ERROR "register element index out of range 0 to 1"
+ VDUP V8.B[16], V3.B16 // ERROR "register element index out of range 0 to 15"
+ VDUP V8.B[17], V3.B8 // ERROR "register element index out of range 0 to 15"
+ VDUP V8.H[9], V3.H4 // ERROR "register element index out of range 0 to 7"
+ VDUP V8.H[9], V3.H8 // ERROR "register element index out of range 0 to 7"
+ VDUP V8.S[4], V3.S2 // ERROR "register element index out of range 0 to 3"
+ VDUP V8.S[4], V3.S4 // ERROR "register element index out of range 0 to 3"
+ VDUP V8.D[2], V3.D2 // ERROR "register element index out of range 0 to 1"
VFMLA V1.D2, V12.D2, V3.S2 // ERROR "operand mismatch"
VFMLA V1.S2, V12.S2, V3.D2 // ERROR "operand mismatch"
VFMLA V1.S4, V12.S2, V3.D2 // ERROR "operand mismatch"
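In the testdata hunk above, the VDUP destinations change from R3.* to V3.*: with the new check, an arrangement suffix on a general register is itself rejected ("invalid register and extension combination"), so keeping a vector destination lets each line still exercise the element-index diagnostic named in its ERROR comment. A loose sketch of that interaction, with placeholder ranges and a deliberately simplified error order:

package main

import (
	"errors"
	"fmt"
)

const (
	regR0, regR31 int16 = 0, 31  // placeholder general-register range
	regV0, regV31 int16 = 64, 95 // placeholder vector-register range
)

// checkVDUP loosely models one test line: the destination arrangement and the
// source element index are validated separately, and a general-register
// destination now fails the combination check before anything else is reported.
func checkVDUP(srcIndex, srcMax int, dstReg int16) error {
	if dstReg < regV0 || dstReg > regV31 {
		return errors.New("invalid register and extension combination")
	}
	if srcIndex < 0 || srcIndex > srcMax {
		return fmt.Errorf("register element index out of range 0 to %d", srcMax)
	}
	return nil
}

func main() {
	fmt.Println(checkVDUP(16, 15, regR0+3)) // old form, VDUP V8.B[16], R3.B16: combination error
	fmt.Println(checkVDUP(16, 15, regV0+3)) // new form, VDUP V8.B[16], V3.B16: index-range error, as the test intends
}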