Commit 257b01f8 authored by Cherry Zhang

cmd/compile: use ANDconst to mask out leading/trailing bits on ARM64

For an AND that masks out leading or trailing bits, generic rules
rewrite it to a pair of shifts. On ARM64 the mask actually fits
into the immediate field of an AND instruction, so we rewrite it
back to AND.

Fixes #19857.
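
As an illustration (not part of the commit), the identity the rules rely
on can be checked in a few lines of Go; c = 3 is an arbitrary example shift:

	package main

	import "fmt"

	func main() {
		const c = 3
		x := uint64(0xdeadbeefcafebabe)

		// Masking out the leading (high) c bits: generic rules emit
		// (x << c) >> c; the equivalent mask fits an ARM64 AND immediate.
		maskHigh := uint64(1)<<(64-c) - 1
		fmt.Println((x<<c)>>c == x&maskHigh) // true

		// Masking out the trailing (low) c bits: (x >> c) << c.
		maskLow := ^(uint64(1)<<c - 1)
		fmt.Println((x>>c)<<c == x&maskLow) // true
	}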

Change-Id: I479d7320ae4f29bb3f0056d5979bde4478063a8f
Reviewed-on: https://go-review.googlesource.com/39651
Run-TryBot: Cherry Zhang <cherryyz@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: David Chase <drchase@google.com>
parent 168eb9cf
@@ -1327,6 +1327,22 @@ var linuxARM64Tests = []*asmTest{
 		`,
 		[]string{"\tCLZ\t"},
 	},
+	{
+		`
+		func f34(a uint64) uint64 {
+			return a & ((1<<63)-1)
+		}
+		`,
+		[]string{"\tAND\t"},
+	},
+	{
+		`
+		func f35(a uint64) uint64 {
+			return a & (1<<63)
+		}
+		`,
+		[]string{"\tAND\t"},
+	},
 }
 
 var linuxMIPSTests = []*asmTest{
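
The new test cases grep the compiled output for an AND instruction. For
reference, the same check can be reproduced by hand with the cross-compiler
(x.go is an arbitrary file name); per the tests above, the -S listing for
both functions should contain "\tAND\t" rather than a shift pair:

	// x.go
	// Inspect with: GOARCH=arm64 GOOS=linux go tool compile -S x.go
	package p

	func f34(a uint64) uint64 {
		return a & ((1 << 63) - 1) // mask out the leading bit
	}

	func f35(a uint64) uint64 {
		return a & (1 << 63) // mask out the trailing 63 bits
	}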
@@ -1148,8 +1148,11 @@
 ( ORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) && c < 32 && t.Size() == 4 -> (RORWconst [ c] x)
 (XORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) && c < 32 && t.Size() == 4 -> (RORWconst [ c] x)
 
-// Replace SRL-of-SLL with ROR-of-SLL to avoid hardware bug.
-(SRLconst [c] y:(SLLconst [c] _)) && c <= 8 -> (RORconst [c] y)
+// Generic rules rewrite certain AND to a pair of shifts.
+// However, on ARM64 the bitmask can fit into an instruction.
+// Rewrite it back to AND.
+(SRLconst [c] (SLLconst [c] x)) && 0 < c && c < 64 -> (ANDconst [1<<uint(64-c)-1] x) // mask out high bits
+(SLLconst [c] (SRLconst [c] x)) && 0 < c && c < 64 -> (ANDconst [^(1<<uint(c)-1)] x) // mask out low bits
 
 // do combined loads
 // little endian loads
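
The AuxInt expressions in the two new rules are just the masks spelled as
shift arithmetic. A small sketch (c = 8 is an arbitrary example value) of
the constants they produce:

	package main

	import "fmt"

	func main() {
		const c = 8

		// SRLconst of SLLconst keeps the low 64-c bits (masks out high bits).
		fmt.Printf("%#018x\n", uint64(1)<<uint(64-c)-1) // 0x00ffffffffffffff

		// SLLconst of SRLconst clears the low c bits (masks out low bits).
		fmt.Printf("%#018x\n", ^(uint64(1)<<uint(c)-1)) // 0xffffffffffffff00
	}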
@@ -8583,6 +8583,27 @@ func rewriteValueARM64_OpARM64SLLconst(v *Value) bool {
 		v.AuxInt = int64(d) << uint64(c)
 		return true
 	}
+	// match: (SLLconst [c] (SRLconst [c] x))
+	// cond: 0 < c && c < 64
+	// result: (ANDconst [^(1<<uint(c)-1)] x)
+	for {
+		c := v.AuxInt
+		v_0 := v.Args[0]
+		if v_0.Op != OpARM64SRLconst {
+			break
+		}
+		if v_0.AuxInt != c {
+			break
+		}
+		x := v_0.Args[0]
+		if !(0 < c && c < 64) {
+			break
+		}
+		v.reset(OpARM64ANDconst)
+		v.AuxInt = ^(1<<uint(c) - 1)
+		v.AddArg(x)
+		return true
+	}
 	return false
 }
 func rewriteValueARM64_OpARM64SRA(v *Value) bool {
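
Why ANDconst is always encodable here: ARM64 logical instructions take
"bitmask immediates", and both masks these rules produce are single
contiguous runs of ones. A simplified sketch of such a check follows
(isRunOfOnes is a name made up for illustration; the assembler's real
bitmask-immediate test is more general). Note how the rules' 0 < c && c < 64
condition keeps the masks away from 0 and all-ones, which the encoding
cannot represent:

	package main

	import (
		"fmt"
		"math/bits"
	)

	// isRunOfOnes reports whether v is a single contiguous run of 1 bits
	// (no wrap-around). Both mask shapes produced by the new rules,
	// 1<<uint(64-c)-1 and ^(1<<uint(c)-1), have this form.
	func isRunOfOnes(v uint64) bool {
		if v == 0 {
			return false
		}
		t := v >> uint(bits.TrailingZeros64(v))
		return t&(t+1) == 0
	}

	func main() {
		const c = 8
		fmt.Println(isRunOfOnes(uint64(1)<<uint(64-c) - 1)) // true
		fmt.Println(isRunOfOnes(^(uint64(1)<<uint(c) - 1))) // true
	}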
@@ -8653,24 +8674,25 @@ func rewriteValueARM64_OpARM64SRLconst(v *Value) bool {
 		v.AuxInt = int64(uint64(d) >> uint64(c))
 		return true
 	}
-	// match: (SRLconst [c] y:(SLLconst [c] _))
-	// cond: c <= 8
-	// result: (RORconst [c] y)
+	// match: (SRLconst [c] (SLLconst [c] x))
+	// cond: 0 < c && c < 64
+	// result: (ANDconst [1<<uint(64-c)-1] x)
 	for {
 		c := v.AuxInt
-		y := v.Args[0]
-		if y.Op != OpARM64SLLconst {
+		v_0 := v.Args[0]
+		if v_0.Op != OpARM64SLLconst {
 			break
 		}
-		if y.AuxInt != c {
+		if v_0.AuxInt != c {
 			break
 		}
-		if !(c <= 8) {
+		x := v_0.Args[0]
+		if !(0 < c && c < 64) {
 			break
 		}
-		v.reset(OpARM64RORconst)
-		v.AuxInt = c
-		v.AddArg(y)
+		v.reset(OpARM64ANDconst)
+		v.AuxInt = 1<<uint(64-c) - 1
+		v.AddArg(x)
 		return true
 	}
 	return false
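
For the rule replaced above, the old RORconst form and the new ANDconst
form compute the same value: the SLLconst result has its low c bits zero,
so rotating it right by c equals shifting it right by c, which is exactly
the mask (and the new rule fires for the whole 0 < c < 64 range, not just
c <= 8). A quick sketch, with c = 8 as an arbitrary example:

	package main

	import (
		"fmt"
		"math/bits"
	)

	func main() {
		const c = 8
		x := uint64(0x0123456789abcdef)
		y := x << c // SLLconst result: low c bits are zero

		ror := bits.RotateLeft64(y, -c)        // old: RORconst [c] y
		and := x & (uint64(1)<<uint(64-c) - 1) // new: ANDconst mask
		fmt.Println(ror == y>>c, ror == and)   // true true
	}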