path: root/src/cmd/internal/obj/arm64/asm7.go
author     eric fang <eric.fang@arm.com>   2022-08-08 07:42:43 +0000
committer  Eric Fang <eric.fang@arm.com>   2022-11-18 08:04:52 +0000
commit     205f636e0ab491b7a06e7b7879abb897cdaaf1b4 (patch)
tree       2166f22167a9b6b6d346660c936abba764ba0227 /src/cmd/internal/obj/arm64/asm7.go
parent     e4435cb8448514d2413f9d9aa3ee40738d26fd67 (diff)
download   go-205f636e0ab491b7a06e7b7879abb897cdaaf1b4.tar.gz
           go-205f636e0ab491b7a06e7b7879abb897cdaaf1b4.zip
cmd/internal/obj/arm64: tidy literal pool
This CL cleans up the literal pool implementation and inserts an UNDEF
instruction before the literal pool if the last instruction of the function
is not an unconditional jump, RET, or ERET instruction.

Change-Id: Ifecb9e3372478362dde246c1bc9bc8d527a469d5
Reviewed-on: https://go-review.googlesource.com/c/go/+/424134
Reviewed-by: David Chase <drchase@google.com>
Reviewed-by: Joedian Reid <joedian@golang.org>
Run-TryBot: Eric Fang <eric.fang@arm.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
Diffstat (limited to 'src/cmd/internal/obj/arm64/asm7.go')
-rw-r--r--  src/cmd/internal/obj/arm64/asm7.go | 89
1 file changed, 45 insertions(+), 44 deletions(-)
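Before the diff itself, a minimal, self-contained sketch of the decision this CL consolidates into checkpool/flushpool: flush the pool when it risks drifting out of the PC-relative load range or when p is the last instruction of the function, and guard the flushed pool with a branch, or with UNDEF at the end of a function, only when execution could otherwise fall into the pool data. The types, constant, and mnemonic strings below are hypothetical stand-ins, not the assembler's obj.Prog/ctxt7 machinery, and the range constant is only an illustrative limit.

package main

import "fmt"

// inst is a toy stand-in for obj.Prog: an opcode name, a pc, and a link to
// the next instruction (nil when this is the last instruction).
type inst struct {
	op   string
	pc   int64
	next *inst
}

// litPool is a toy stand-in for the ctxt7 pool fields used in checkpool.
type litPool struct {
	size  uint32
	start uint32
}

// maxPCDisp is an assumed stand-in for the pc-relative displacement limit;
// the real constant and ispcdisp check in asm7.go may differ in detail.
const maxPCDisp = 1 << 20

// needFlush mirrors the merged checkpool condition: near the encodable size
// limit, out of pc-relative range, or p is the last instruction.
func needFlush(p *inst, pool *litPool) bool {
	disp := p.pc + 4 + int64(pool.size) - int64(pool.start) + 8
	return pool.size >= 0xffff0 || disp < -maxPCDisp || disp > maxPCDisp || p.next == nil
}

// guardKind mirrors the flushpool guard choice: no guard after an
// unconditional transfer, UNDEF at the end of a function, otherwise a
// branch over the pool to p.next.
func guardKind(p *inst) string {
	switch {
	case p.op == "B" || p.op == "RET" || p.op == "ERET":
		return "none"
	case p.next == nil:
		return "UNDEF"
	default:
		return "B"
	}
}

func main() {
	last := &inst{op: "ADD", pc: 0x100}
	// Last instruction of a function with a pending pool: flush and guard with UNDEF.
	fmt.Println(needFlush(last, &litPool{size: 64, start: 0}), guardKind(last))
}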
diff --git a/src/cmd/internal/obj/arm64/asm7.go b/src/cmd/internal/obj/arm64/asm7.go
index 324f32f18f..3d5abedc1e 100644
--- a/src/cmd/internal/obj/arm64/asm7.go
+++ b/src/cmd/internal/obj/arm64/asm7.go
@@ -1087,6 +1087,8 @@ func span7(ctxt *obj.Link, cursym *obj.LSym, newprog obj.ProgAlloc) {
c.ctxt.Diag("zero-width instruction\n%v", p)
}
}
+ pc += int64(m)
+
if o.flag&LFROM != 0 {
c.addpool(p, &p.From)
}
@@ -1096,13 +1098,8 @@ func span7(ctxt *obj.Link, cursym *obj.LSym, newprog obj.ProgAlloc) {
if o.flag&LTO != 0 {
c.addpool(p, &p.To)
}
-
- if p.As == AB || p.As == obj.ARET || p.As == AERET { /* TODO: other unconditional operations */
- c.checkpool(p, 0)
- }
- pc += int64(m)
if c.blitrl != nil {
- c.checkpool(p, 1)
+ c.checkpool(p)
}
}
@@ -1249,48 +1246,54 @@ func (c *ctxt7) isRestartable(p *obj.Prog) bool {
/*
* when the first reference to the literal pool threatens
* to go out of range of a 1Mb PC-relative offset
- * drop the pool now, and branch round it.
+ * drop the pool now.
*/
-func (c *ctxt7) checkpool(p *obj.Prog, skip int) {
- if c.pool.size >= 0xffff0 || !ispcdisp(int32(p.Pc+4+int64(c.pool.size)-int64(c.pool.start)+8)) {
- c.flushpool(p, skip)
- } else if p.Link == nil {
- c.flushpool(p, 2)
+func (c *ctxt7) checkpool(p *obj.Prog) {
+ // If the pool is going to go out of range or p is the last instruction of the function,
+ // flush the pool.
+ if c.pool.size >= 0xffff0 || !ispcdisp(int32(p.Pc+4+int64(c.pool.size)-int64(c.pool.start)+8)) || p.Link == nil {
+ c.flushpool(p)
}
}
-func (c *ctxt7) flushpool(p *obj.Prog, skip int) {
- if c.blitrl != nil {
- if skip != 0 {
- if c.ctxt.Debugvlog && skip == 1 {
- fmt.Printf("note: flush literal pool at %#x: len=%d ref=%x\n", uint64(p.Pc+4), c.pool.size, c.pool.start)
- }
- q := c.newprog()
+func (c *ctxt7) flushpool(p *obj.Prog) {
+ // We need to insert a branch before flushing the pool.
+ // We don't need the jump if following an unconditional branch.
+ // TODO: other unconditional operations.
+ if !(p.As == AB || p.As == obj.ARET || p.As == AERET) {
+ if c.ctxt.Debugvlog {
+ fmt.Printf("note: flush literal pool at %#x: len=%d ref=%x\n", uint64(p.Pc+4), c.pool.size, c.pool.start)
+ }
+ q := c.newprog()
+ if p.Link == nil {
+ // If p is the last instruction of the function, insert an UNDEF instruction in case the
+ // execution falls through to the pool.
+ q.As = obj.AUNDEF
+ } else {
+ // Else insert a branch to the next instruction of p.
q.As = AB
q.To.Type = obj.TYPE_BRANCH
q.To.SetTarget(p.Link)
- q.Link = c.blitrl
- q.Pos = p.Pos
- c.blitrl = q
- } else if p.Pc+int64(c.pool.size)-int64(c.pool.start) < maxPCDisp {
- return
}
+ q.Link = c.blitrl
+ q.Pos = p.Pos
+ c.blitrl = q
+ }
- // The line number for constant pool entries doesn't really matter.
- // We set it to the line number of the preceding instruction so that
- // there are no deltas to encode in the pc-line tables.
- for q := c.blitrl; q != nil; q = q.Link {
- q.Pos = p.Pos
- }
+ // The line number for constant pool entries doesn't really matter.
+ // We set it to the line number of the preceding instruction so that
+ // there are no deltas to encode in the pc-line tables.
+ for q := c.blitrl; q != nil; q = q.Link {
+ q.Pos = p.Pos
+ }
- c.elitrl.Link = p.Link
- p.Link = c.blitrl
+ c.elitrl.Link = p.Link
+ p.Link = c.blitrl
- c.blitrl = nil /* BUG: should refer back to values until out-of-range */
- c.elitrl = nil
- c.pool.size = 0
- c.pool.start = 0
- }
+ c.blitrl = nil /* BUG: should refer back to values until out-of-range */
+ c.elitrl = nil
+ c.pool.size = 0
+ c.pool.start = 0
}
// addpool128 adds a 128-bit constant to literal pool by two consecutive DWORD
@@ -1359,23 +1362,21 @@ func (c *ctxt7) addpool(p *obj.Prog, a *obj.Addr) {
}
}
- q := c.newprog()
- *q = *t
if c.blitrl == nil {
- c.blitrl = q
+ c.blitrl = t
c.pool.start = uint32(p.Pc)
} else {
- c.elitrl.Link = q
+ c.elitrl.Link = t
}
- c.elitrl = q
- if q.As == ADWORD {
+ c.elitrl = t
+ if t.As == ADWORD {
// make DWORD 8-byte aligned, this is not required by ISA,
// just to avoid performance penalties when loading from
// the constant pool across a cache line.
c.pool.size = roundUp(c.pool.size, 8)
}
c.pool.size += uint32(sz)
- p.Pool = q
+ p.Pool = t
}
// roundUp rounds up x to "to".
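For reference, a small sketch of the power-of-two round-up that the pool-size alignment above relies on (c.pool.size = roundUp(c.pool.size, 8) before appending a DWORD entry). It mirrors the common (x + to - 1) &^ (to - 1) idiom rather than quoting asm7.go's exact helper, whose error handling may differ.

package main

import "fmt"

// roundUp rounds x up to the next multiple of to; to must be a power of two.
func roundUp(x, to uint32) uint32 {
	if to == 0 || to&(to-1) != 0 {
		panic("roundUp: to must be a non-zero power of two")
	}
	return (x + to - 1) &^ (to - 1)
}

func main() {
	// A DWORD entry appended when the pool is 12 bytes long starts at
	// offset 16 after alignment, keeping 8-byte alignment for the load.
	fmt.Println(roundUp(12, 8)) // 16
}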