author    Agniva De Sarker <agnivade@yahoo.co.in>         2019-07-10 00:00:13 +0530
committer Agniva De Sarker <agniva.quicksilver@gmail.com> 2019-08-28 05:55:52 +0000
commit    7be97af2ff6d4bdc8a52cb72677bbd68703489fd (patch)
tree      2c1f3e4e4631ab0c375d563773212cc46e699966 /src/cmd/compile/internal/ssa/rewriteWasm.go
parent    07f04607370bf46c6f3a12ac3e182068da403b86 (diff)
cmd/compile: apply optimization for readonly globals on wasm

Extend the optimization introduced in CL 141118 to the wasm architecture.

For reference, the rules trigger 212 times while building std and cmd:

    $ GOOS=js GOARCH=wasm gotip build std cmd
    $ grep -E "Wasm.rules:44(1|2|3|4)" rulelog | wc -l
    212

Updates #26498

Change-Id: I153684a2b98589ae812b42268da08b65679e09d1
Reviewed-on: https://go-review.googlesource.com/c/go/+/185477
Run-TryBot: Agniva De Sarker <agniva.quicksilver@gmail.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Cherry Zhang <cherryyz@google.com>
Reviewed-by: Richard Musiol <neelance@gmail.com>
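The four added rules (around Wasm.rules:441-444, per the grep above) match an integer load (I64Load, I64Load32U, I64Load16U, I64Load8U) whose address is a LoweredAddr of a read-only symbol plus a constant offset, and replace it with an I64Const whose value is read directly from the symbol's data; the generated matchers appear in the diff below. As a minimal, hypothetical sketch (not taken from the CL), this is the kind of Go code that can benefit, assuming the constant string ends up in a read-only data symbol and the comparison is inlined:

	package p

	// isWasm is an illustrative example only. Comparing s against the
	// constant "wasm" loads the constant's bytes from a read-only data
	// symbol at fixed offsets; on GOARCH=wasm the new rules can fold
	// those loads into I64Const values at compile time.
	func isWasm(s string) bool {
		return s == "wasm"
	}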
Diffstat (limited to 'src/cmd/compile/internal/ssa/rewriteWasm.go')
-rw-r--r--  src/cmd/compile/internal/ssa/rewriteWasm.go  98
1 file changed, 98 insertions, 0 deletions
diff --git a/src/cmd/compile/internal/ssa/rewriteWasm.go b/src/cmd/compile/internal/ssa/rewriteWasm.go
index f57305dade..f374565327 100644
--- a/src/cmd/compile/internal/ssa/rewriteWasm.go
+++ b/src/cmd/compile/internal/ssa/rewriteWasm.go
@@ -5508,6 +5508,8 @@ func rewriteValueWasm_OpWasmI64Eqz_0(v *Value) bool {
 	return false
 }
 func rewriteValueWasm_OpWasmI64Load_0(v *Value) bool {
+	b := v.Block
+	config := b.Func.Config
 	// match: (I64Load [off] (I64AddConst [off2] ptr) mem)
 	// cond: isU32Bit(off+off2)
 	// result: (I64Load [off+off2] ptr mem)
@@ -5529,6 +5531,29 @@ func rewriteValueWasm_OpWasmI64Load_0(v *Value) bool {
 		v.AddArg(mem)
 		return true
 	}
+	// match: (I64Load [off] (LoweredAddr {sym} [off2] (SB)) _)
+	// cond: symIsRO(sym) && isU32Bit(off+off2)
+	// result: (I64Const [int64(read64(sym, off+off2, config.BigEndian))])
+	for {
+		off := v.AuxInt
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpWasmLoweredAddr {
+			break
+		}
+		off2 := v_0.AuxInt
+		sym := v_0.Aux
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpSB {
+			break
+		}
+		if !(symIsRO(sym) && isU32Bit(off+off2)) {
+			break
+		}
+		v.reset(OpWasmI64Const)
+		v.AuxInt = int64(read64(sym, off+off2, config.BigEndian))
+		return true
+	}
 	return false
 }
 func rewriteValueWasm_OpWasmI64Load16S_0(v *Value) bool {
@@ -5556,6 +5581,8 @@ func rewriteValueWasm_OpWasmI64Load16S_0(v *Value) bool {
 	return false
 }
 func rewriteValueWasm_OpWasmI64Load16U_0(v *Value) bool {
+	b := v.Block
+	config := b.Func.Config
 	// match: (I64Load16U [off] (I64AddConst [off2] ptr) mem)
 	// cond: isU32Bit(off+off2)
 	// result: (I64Load16U [off+off2] ptr mem)
@@ -5577,6 +5604,29 @@ func rewriteValueWasm_OpWasmI64Load16U_0(v *Value) bool {
 		v.AddArg(mem)
 		return true
 	}
+	// match: (I64Load16U [off] (LoweredAddr {sym} [off2] (SB)) _)
+	// cond: symIsRO(sym) && isU32Bit(off+off2)
+	// result: (I64Const [int64(read16(sym, off+off2, config.BigEndian))])
+	for {
+		off := v.AuxInt
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpWasmLoweredAddr {
+			break
+		}
+		off2 := v_0.AuxInt
+		sym := v_0.Aux
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpSB {
+			break
+		}
+		if !(symIsRO(sym) && isU32Bit(off+off2)) {
+			break
+		}
+		v.reset(OpWasmI64Const)
+		v.AuxInt = int64(read16(sym, off+off2, config.BigEndian))
+		return true
+	}
 	return false
 }
 func rewriteValueWasm_OpWasmI64Load32S_0(v *Value) bool {
@@ -5604,6 +5654,8 @@ func rewriteValueWasm_OpWasmI64Load32S_0(v *Value) bool {
 	return false
 }
 func rewriteValueWasm_OpWasmI64Load32U_0(v *Value) bool {
+	b := v.Block
+	config := b.Func.Config
 	// match: (I64Load32U [off] (I64AddConst [off2] ptr) mem)
 	// cond: isU32Bit(off+off2)
 	// result: (I64Load32U [off+off2] ptr mem)
@@ -5625,6 +5677,29 @@ func rewriteValueWasm_OpWasmI64Load32U_0(v *Value) bool {
 		v.AddArg(mem)
 		return true
 	}
+	// match: (I64Load32U [off] (LoweredAddr {sym} [off2] (SB)) _)
+	// cond: symIsRO(sym) && isU32Bit(off+off2)
+	// result: (I64Const [int64(read32(sym, off+off2, config.BigEndian))])
+	for {
+		off := v.AuxInt
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpWasmLoweredAddr {
+			break
+		}
+		off2 := v_0.AuxInt
+		sym := v_0.Aux
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpSB {
+			break
+		}
+		if !(symIsRO(sym) && isU32Bit(off+off2)) {
+			break
+		}
+		v.reset(OpWasmI64Const)
+		v.AuxInt = int64(read32(sym, off+off2, config.BigEndian))
+		return true
+	}
 	return false
 }
 func rewriteValueWasm_OpWasmI64Load8S_0(v *Value) bool {
@@ -5673,6 +5748,29 @@ func rewriteValueWasm_OpWasmI64Load8U_0(v *Value) bool {
 		v.AddArg(mem)
 		return true
 	}
+	// match: (I64Load8U [off] (LoweredAddr {sym} [off2] (SB)) _)
+	// cond: symIsRO(sym) && isU32Bit(off+off2)
+	// result: (I64Const [int64(read8(sym, off+off2))])
+	for {
+		off := v.AuxInt
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpWasmLoweredAddr {
+			break
+		}
+		off2 := v_0.AuxInt
+		sym := v_0.Aux
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpSB {
+			break
+		}
+		if !(symIsRO(sym) && isU32Bit(off+off2)) {
+			break
+		}
+		v.reset(OpWasmI64Const)
+		v.AuxInt = int64(read8(sym, off+off2))
+		return true
+	}
 	return false
 }
 func rewriteValueWasm_OpWasmI64Mul_0(v *Value) bool {