Diffstat (limited to 'src/cmd/compile/internal/arm64/ssa.go')
-rw-r--r--  src/cmd/compile/internal/arm64/ssa.go | 63
1 file changed, 63 insertions(+), 0 deletions(-)
diff --git a/src/cmd/compile/internal/arm64/ssa.go b/src/cmd/compile/internal/arm64/ssa.go
index 52580242dc..24281300ae 100644
--- a/src/cmd/compile/internal/arm64/ssa.go
+++ b/src/cmd/compile/internal/arm64/ssa.go
@@ -182,6 +182,11 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
p.From.Reg = x
p.To.Type = obj.TYPE_REG
p.To.Reg = y
+ case ssa.OpARM64MOVDnop:
+ if gc.SSARegNum(v) != gc.SSARegNum(v.Args[0]) {
+ v.Fatalf("input[0] and output not in same register %s", v.LongString())
+ }
+ // nothing to do
case ssa.OpLoadReg:
if v.Type.IsFlags() {
v.Unimplementedf("load flags not implemented: %v", v.LongString())
@@ -581,6 +586,64 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
gc.Maxarg = v.AuxInt
}
case ssa.OpARM64LoweredNilCheck:
+ // Optimization - if the subsequent block has a load or store
+ // at the same address, we don't need to issue this instruction.
+ mem := v.Args[1]
+ for _, w := range v.Block.Succs[0].Block().Values {
+ if w.Op == ssa.OpPhi {
+ if w.Type.IsMemory() {
+ mem = w
+ }
+ continue
+ }
+ if len(w.Args) == 0 || !w.Args[len(w.Args)-1].Type.IsMemory() {
+ // w doesn't use a store - can't be a memory op.
+ continue
+ }
+ if w.Args[len(w.Args)-1] != mem {
+ v.Fatalf("wrong store after nilcheck v=%s w=%s", v, w)
+ }
+ switch w.Op {
+ case ssa.OpARM64MOVBload, ssa.OpARM64MOVBUload, ssa.OpARM64MOVHload, ssa.OpARM64MOVHUload,
+ ssa.OpARM64MOVWload, ssa.OpARM64MOVWUload, ssa.OpARM64MOVDload,
+ ssa.OpARM64FMOVSload, ssa.OpARM64FMOVDload,
+ ssa.OpARM64MOVBstore, ssa.OpARM64MOVHstore, ssa.OpARM64MOVWstore, ssa.OpARM64MOVDstore,
+ ssa.OpARM64FMOVSstore, ssa.OpARM64FMOVDstore:
+ // arg0 is ptr, auxint is offset
+ if w.Args[0] == v.Args[0] && w.Aux == nil && w.AuxInt >= 0 && w.AuxInt < minZeroPage {
+ if gc.Debug_checknil != 0 && int(v.Line) > 1 {
+ gc.Warnl(v.Line, "removed nil check")
+ }
+ return
+ }
+ case ssa.OpARM64DUFFZERO, ssa.OpARM64LoweredZero:
+ // arg0 is ptr
+ if w.Args[0] == v.Args[0] {
+ if gc.Debug_checknil != 0 && int(v.Line) > 1 {
+ gc.Warnl(v.Line, "removed nil check")
+ }
+ return
+ }
+ case ssa.OpARM64LoweredMove:
+ // arg0 is dst ptr, arg1 is src ptr
+ if w.Args[0] == v.Args[0] || w.Args[1] == v.Args[0] {
+ if gc.Debug_checknil != 0 && int(v.Line) > 1 {
+ gc.Warnl(v.Line, "removed nil check")
+ }
+ return
+ }
+ default:
+ }
+ if w.Type.IsMemory() {
+ if w.Op == ssa.OpVarDef || w.Op == ssa.OpVarKill || w.Op == ssa.OpVarLive {
+ // these ops are OK
+ mem = w
+ continue
+ }
+ // We can't delay the nil check past the next store.
+ break
+ }
+ }
// Issue a load which will fault if arg is nil.
p := gc.Prog(arm64.AMOVB)
p.From.Type = obj.TYPE_MEM