Commit 7846500a authored by Cherry Zhang

cmd/compile: remove redundant constant shift rules

Normal shift rules plus constant folding are enough to generate
efficient shift-by-constant instructions.
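
For example, x << 17 of a 64-bit value on ARM64 reaches SSA as a
generic shift whose count is already a constant, and roughly (a
hand-traced sketch; the exact rule spellings live in ARM64.rules):

    (Lsh64x64 x (MOVDconst [17]))
    -> (CSELULT (SLL x (MOVDconst [17])) (MOVDconst [0])
                (CMPconst [64] (MOVDconst [17])))   // generic shift rule
    -> (CSELULT (SLLconst x [17]) (MOVDconst [0]) (FlagLT_ULT))
    -> (SLLconst x [17])                            // constant folding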

Add test to make sure we don't generate comparisons for constant
shifts.
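
(The new entries run under the asm test harness in asm_test.go;
something like "go test cmd/compile/internal/gc -run TestAssembly"
should exercise them, assuming the usual harness setup.)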

TODO: there are still constant shift rules on PPC64. If they
are removed, the constant folding rules are not enough to remove
all the test and mask stuff for constant shifts. Leave them in
for now.

Fixes #20663.

Change-Id: I724cc324aa8607762d0c8aacf9bfa641bda5c2a1
Reviewed-on: https://go-review.googlesource.com/60330
Run-TryBot: Cherry Zhang <cherryyz@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Keith Randall <khr@golang.org>
parent a9216a0a
@@ -257,6 +257,11 @@ var allAsmTests = []*asmTests{
		imports: []string{"math/bits"},
		tests:   linuxMIPSTests,
	},
	{
		arch:  "mips64",
		os:    "linux",
		tests: linuxMIPS64Tests,
	},
	{
		arch: "ppc64le",
		os:   "linux",
@@ -1744,6 +1749,17 @@ var linuxARM64Tests = []*asmTest{
		`,
		pos: []string{"TEXT\t.*, [$]-8-8"},
	},
	{
		// check that we don't emit comparisons for constant shifts
		fn: `
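		// nosplit keeps the stack-growth prologue, and its CMP, out of the output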
		//go:nosplit
		func $(x int) int {
			return x << 17
		}
		`,
		pos: []string{"LSL\t\\$17"},
		neg: []string{"CMP"},
	},
}
var linuxMIPSTests = []*asmTest{
@@ -1839,6 +1855,19 @@ var linuxMIPSTests = []*asmTest{
	},
}
var linuxMIPS64Tests = []*asmTest{
	{
		// check that we don't emit comparisons for constant shifts
		fn: `
		func $(x int) int {
			return x << 17
		}
		`,
		pos: []string{"SLLV\t\\$17"},
		neg: []string{"SGT"},
	},
}
var linuxPPC64LETests = []*asmTest{
// Fused multiply-add/sub instructions.
{
......
@@ -103,98 +103,68 @@
(NeqB x y) -> (XOR x y)
(Not x) -> (XOR (MOVDconst [1]) x)
// constant shifts
(Lsh64x64 x (MOVDconst [c])) && uint64(c) < 64 -> (SLLconst x [c])
(Rsh64x64 x (MOVDconst [c])) && uint64(c) < 64 -> (SRAconst x [c])
(Rsh64Ux64 x (MOVDconst [c])) && uint64(c) < 64 -> (SRLconst x [c])
(Lsh32x64 x (MOVDconst [c])) && uint64(c) < 32 -> (SLLconst x [c])
(Rsh32x64 x (MOVDconst [c])) && uint64(c) < 32 -> (SRAconst (SignExt32to64 x) [c])
(Rsh32Ux64 x (MOVDconst [c])) && uint64(c) < 32 -> (SRLconst (ZeroExt32to64 x) [c])
(Lsh16x64 x (MOVDconst [c])) && uint64(c) < 16 -> (SLLconst x [c])
(Rsh16x64 x (MOVDconst [c])) && uint64(c) < 16 -> (SRAconst (SignExt16to64 x) [c])
(Rsh16Ux64 x (MOVDconst [c])) && uint64(c) < 16 -> (SRLconst (ZeroExt16to64 x) [c])
(Lsh8x64 x (MOVDconst [c])) && uint64(c) < 8 -> (SLLconst x [c])
(Rsh8x64 x (MOVDconst [c])) && uint64(c) < 8 -> (SRAconst (SignExt8to64 x) [c])
(Rsh8Ux64 x (MOVDconst [c])) && uint64(c) < 8 -> (SRLconst (ZeroExt8to64 x) [c])
// large constant shifts
(Lsh64x64 _ (MOVDconst [c])) && uint64(c) >= 64 -> (MOVDconst [0])
(Rsh64Ux64 _ (MOVDconst [c])) && uint64(c) >= 64 -> (MOVDconst [0])
(Lsh32x64 _ (MOVDconst [c])) && uint64(c) >= 32 -> (MOVDconst [0])
(Rsh32Ux64 _ (MOVDconst [c])) && uint64(c) >= 32 -> (MOVDconst [0])
(Lsh16x64 _ (MOVDconst [c])) && uint64(c) >= 16 -> (MOVDconst [0])
(Rsh16Ux64 _ (MOVDconst [c])) && uint64(c) >= 16 -> (MOVDconst [0])
(Lsh8x64 _ (MOVDconst [c])) && uint64(c) >= 8 -> (MOVDconst [0])
(Rsh8Ux64 _ (MOVDconst [c])) && uint64(c) >= 8 -> (MOVDconst [0])
// large constant signed right shift, we leave the sign bit
(Rsh64x64 x (MOVDconst [c])) && uint64(c) >= 64 -> (SRAconst x [63])
(Rsh32x64 x (MOVDconst [c])) && uint64(c) >= 32 -> (SRAconst (SignExt32to64 x) [63])
(Rsh16x64 x (MOVDconst [c])) && uint64(c) >= 16 -> (SRAconst (SignExt16to64 x) [63])
(Rsh8x64 x (MOVDconst [c])) && uint64(c) >= 8 -> (SRAconst (SignExt8to64 x) [63])
// shifts
// hardware instruction uses only the low 6 bits of the shift
// we compare to 64 to ensure Go semantics for large shifts
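// e.g. Go requires x << 65 == 0 for a 64-bit x, while SLL alone would compute
// x << 1. For constant counts, the CMPconst below folds against the constant,
// CSELULT with known flags reduces to one arm, and SLL of a constant becomes
// SLLconst (see the constant-folding rules elsewhere in this file), so no
// CMP survives for shift-by-constant.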
(Lsh64x64 <t> x y) -> (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
(Lsh64x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Lsh64x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Lsh64x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Lsh32x64 <t> x y) -> (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
(Lsh32x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Lsh32x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Lsh32x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Lsh16x64 <t> x y) -> (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
(Lsh16x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Lsh16x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Lsh16x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Lsh8x64 <t> x y) -> (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
(Lsh8x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Lsh8x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Lsh8x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Rsh64Ux64 <t> x y) -> (CSELULT (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
(Rsh64Ux32 <t> x y) -> (CSELULT (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Rsh64Ux16 <t> x y) -> (CSELULT (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Rsh64Ux8 <t> x y) -> (CSELULT (SRL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Rsh32Ux64 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
(Rsh32Ux32 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Rsh32Ux16 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Rsh32Ux8 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Rsh16Ux64 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
(Rsh16Ux32 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Rsh16Ux16 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Rsh16Ux8 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Rsh8Ux64 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
(Rsh8Ux32 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Rsh8Ux16 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Rsh8Ux8 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Rsh64x64 x y) -> (SRA x (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
(Rsh64x32 x y) -> (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
(Rsh64x16 x y) -> (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
(Rsh64x8 x y) -> (SRA x (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
(Rsh32x64 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
(Rsh32x32 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
(Rsh32x16 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
(Rsh32x8 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
(Rsh16x64 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
(Rsh16x32 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
(Rsh16x16 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
(Rsh16x8 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
(Rsh8x64 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
(Rsh8x32 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
(Rsh8x16 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
(Rsh8x8 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
(Lsh64x64 <t> x y) -> (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
(Lsh64x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Lsh64x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Lsh64x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Lsh32x64 <t> x y) -> (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
(Lsh32x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Lsh32x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Lsh32x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Lsh16x64 <t> x y) -> (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
(Lsh16x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Lsh16x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Lsh16x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Lsh8x64 <t> x y) -> (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
(Lsh8x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Lsh8x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Lsh8x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Rsh64Ux64 <t> x y) -> (CSELULT (SRL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
(Rsh64Ux32 <t> x y) -> (CSELULT (SRL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Rsh64Ux16 <t> x y) -> (CSELULT (SRL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Rsh64Ux8 <t> x y) -> (CSELULT (SRL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Rsh32Ux64 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
(Rsh32Ux32 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Rsh32Ux16 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Rsh32Ux8 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Rsh16Ux64 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
(Rsh16Ux32 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Rsh16Ux16 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Rsh16Ux8 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Rsh8Ux64 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
(Rsh8Ux32 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
(Rsh8Ux16 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
(Rsh8Ux8 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
(Rsh64x64 x y) -> (SRA x (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
(Rsh64x32 x y) -> (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
(Rsh64x16 x y) -> (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
(Rsh64x8 x y) -> (SRA x (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
(Rsh32x64 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
(Rsh32x32 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
(Rsh32x16 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
(Rsh32x8 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
(Rsh16x64 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
(Rsh16x32 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
(Rsh16x16 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
(Rsh16x8 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
(Rsh8x64 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
(Rsh8x32 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
(Rsh8x16 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
(Rsh8x8 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
// constants
(Const64 [val]) -> (MOVDconst [val])
......
@@ -71,65 +71,65 @@
// shifts
// hardware instruction uses only the low 6 bits of the shift
// we compare to 64 to ensure Go semantics for large shifts
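// e.g. Go requires x << 65 == 0 for a 64-bit x, while SLLV alone would compute
// x << 1. For constant counts, the SGTU below folds against the constant, the
// NEGV'd mask becomes all ones or all zeros, and the AND drops out (see the
// constant-folding rules elsewhere in this file), so no SGTU survives for
// shift-by-constant.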
(Lsh64x64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
(Lsh64x32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
(Lsh64x16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
(Lsh64x8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
(Lsh32x64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
(Lsh32x32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
(Lsh32x16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
(Lsh32x8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
(Lsh16x64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
(Lsh16x32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
(Lsh16x16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
(Lsh16x8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
(Lsh8x64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
(Lsh8x32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
(Lsh8x16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
(Lsh8x8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
(Rsh64Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> x y))
(Rsh64Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
(Rsh64Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
(Rsh64Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
(Rsh32Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
(Rsh32Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
(Rsh32Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
(Rsh32Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
(Rsh16Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
(Rsh16Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
(Rsh16Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
(Rsh16Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
(Rsh8Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
(Rsh8Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
(Rsh8Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
(Rsh8Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
(Rsh64x64 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
(Rsh64x32 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
(Rsh64x16 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
(Rsh64x8 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
(Rsh32x64 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
(Rsh32x32 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
(Rsh32x16 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
(Rsh32x8 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
(Rsh16x64 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
(Rsh16x32 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
(Rsh16x16 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
(Rsh16x8 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
(Rsh8x64 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
(Rsh8x32 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
(Rsh8x16 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
(Rsh8x8 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
(Lsh64x64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
(Lsh64x32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
(Lsh64x16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
(Lsh64x8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
(Lsh32x64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
(Lsh32x32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
(Lsh32x16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
(Lsh32x8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
(Lsh16x64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
(Lsh16x32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
(Lsh16x16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
(Lsh16x8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
(Lsh8x64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
(Lsh8x32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
(Lsh8x16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
(Lsh8x8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
(Rsh64Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
(Rsh64Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
(Rsh64Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
(Rsh64Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
(Rsh32Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
(Rsh32Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
(Rsh32Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
(Rsh32Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
(Rsh16Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
(Rsh16Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
(Rsh16Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
(Rsh16Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
(Rsh8Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
(Rsh8Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
(Rsh8Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
(Rsh8Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
(Rsh64x64 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
(Rsh64x32 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
(Rsh64x16 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
(Rsh64x8 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
(Rsh32x64 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
(Rsh32x32 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
(Rsh32x16 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
(Rsh32x8 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
(Rsh16x64 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
(Rsh16x32 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
(Rsh16x16 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
(Rsh16x8 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
(Rsh8x64 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
(Rsh8x32 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
(Rsh8x16 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
(Rsh8x8 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
// unary ops
(Neg64 x) -> (NEGV x)
......
@@ -12775,7 +12775,7 @@ func rewriteValueARM64_OpLsh16x16_0(v *Value) bool {
_ = typ
// match: (Lsh16x16 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -12788,7 +12788,7 @@ func rewriteValueARM64_OpLsh16x16_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -12807,7 +12807,7 @@ func rewriteValueARM64_OpLsh16x32_0(v *Value) bool {
_ = typ
// match: (Lsh16x32 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -12820,7 +12820,7 @@ func rewriteValueARM64_OpLsh16x32_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -12835,45 +12835,9 @@ func rewriteValueARM64_OpLsh16x32_0(v *Value) bool {
func rewriteValueARM64_OpLsh16x64_0(v *Value) bool {
b := v.Block
_ = b
// match: (Lsh16x64 x (MOVDconst [c]))
// cond: uint64(c) < 16
// result: (SLLconst x [c])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) < 16) {
break
}
v.reset(OpARM64SLLconst)
v.AuxInt = c
v.AddArg(x)
return true
}
// match: (Lsh16x64 _ (MOVDconst [c]))
// cond: uint64(c) >= 16
// result: (MOVDconst [0])
for {
_ = v.Args[1]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) >= 16) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = 0
return true
}
// match: (Lsh16x64 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
// result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
@@ -12884,7 +12848,7 @@ func rewriteValueARM64_OpLsh16x64_0(v *Value) bool {
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpConst64, t)
v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v1.AuxInt = 0
v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -12901,7 +12865,7 @@ func rewriteValueARM64_OpLsh16x8_0(v *Value) bool {
_ = typ
// match: (Lsh16x8 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
// result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -12914,7 +12878,7 @@ func rewriteValueARM64_OpLsh16x8_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -12933,7 +12897,7 @@ func rewriteValueARM64_OpLsh32x16_0(v *Value) bool {
_ = typ
// match: (Lsh32x16 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -12946,7 +12910,7 @@ func rewriteValueARM64_OpLsh32x16_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -12965,7 +12929,7 @@ func rewriteValueARM64_OpLsh32x32_0(v *Value) bool {
_ = typ
// match: (Lsh32x32 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -12978,7 +12942,7 @@ func rewriteValueARM64_OpLsh32x32_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -12993,45 +12957,9 @@ func rewriteValueARM64_OpLsh32x32_0(v *Value) bool {
func rewriteValueARM64_OpLsh32x64_0(v *Value) bool {
b := v.Block
_ = b
// match: (Lsh32x64 x (MOVDconst [c]))
// cond: uint64(c) < 32
// result: (SLLconst x [c])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) < 32) {
break
}
v.reset(OpARM64SLLconst)
v.AuxInt = c
v.AddArg(x)
return true
}
// match: (Lsh32x64 _ (MOVDconst [c]))
// cond: uint64(c) >= 32
// result: (MOVDconst [0])
for {
_ = v.Args[1]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) >= 32) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = 0
return true
}
// match: (Lsh32x64 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
// result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
@@ -13042,7 +12970,7 @@ func rewriteValueARM64_OpLsh32x64_0(v *Value) bool {
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpConst64, t)
v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v1.AuxInt = 0
v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -13059,7 +12987,7 @@ func rewriteValueARM64_OpLsh32x8_0(v *Value) bool {
_ = typ
// match: (Lsh32x8 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
// result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -13072,7 +13000,7 @@ func rewriteValueARM64_OpLsh32x8_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -13091,7 +13019,7 @@ func rewriteValueARM64_OpLsh64x16_0(v *Value) bool {
_ = typ
// match: (Lsh64x16 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -13104,7 +13032,7 @@ func rewriteValueARM64_OpLsh64x16_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -13123,7 +13051,7 @@ func rewriteValueARM64_OpLsh64x32_0(v *Value) bool {
_ = typ
// match: (Lsh64x32 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -13136,7 +13064,7 @@ func rewriteValueARM64_OpLsh64x32_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -13151,45 +13079,9 @@ func rewriteValueARM64_OpLsh64x32_0(v *Value) bool {
func rewriteValueARM64_OpLsh64x64_0(v *Value) bool {
b := v.Block
_ = b
// match: (Lsh64x64 x (MOVDconst [c]))
// cond: uint64(c) < 64
// result: (SLLconst x [c])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) < 64) {
break
}
v.reset(OpARM64SLLconst)
v.AuxInt = c
v.AddArg(x)
return true
}
// match: (Lsh64x64 _ (MOVDconst [c]))
// cond: uint64(c) >= 64
// result: (MOVDconst [0])
for {
_ = v.Args[1]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) >= 64) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = 0
return true
}
// match: (Lsh64x64 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
// result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
@@ -13200,7 +13092,7 @@ func rewriteValueARM64_OpLsh64x64_0(v *Value) bool {
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpConst64, t)
v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v1.AuxInt = 0
v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -13217,7 +13109,7 @@ func rewriteValueARM64_OpLsh64x8_0(v *Value) bool {
_ = typ
// match: (Lsh64x8 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
// result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -13230,7 +13122,7 @@ func rewriteValueARM64_OpLsh64x8_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -13249,7 +13141,7 @@ func rewriteValueARM64_OpLsh8x16_0(v *Value) bool {
_ = typ
// match: (Lsh8x16 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
// result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -13262,7 +13154,7 @@ func rewriteValueARM64_OpLsh8x16_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -13281,7 +13173,7 @@ func rewriteValueARM64_OpLsh8x32_0(v *Value) bool {
_ = typ
// match: (Lsh8x32 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
// result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -13294,7 +13186,7 @@ func rewriteValueARM64_OpLsh8x32_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -13309,45 +13201,9 @@ func rewriteValueARM64_OpLsh8x32_0(v *Value) bool {
func rewriteValueARM64_OpLsh8x64_0(v *Value) bool {
b := v.Block
_ = b
// match: (Lsh8x64 x (MOVDconst [c]))
// cond: uint64(c) < 8
// result: (SLLconst x [c])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) < 8) {
break
}
v.reset(OpARM64SLLconst)
v.AuxInt = c
v.AddArg(x)
return true
}
// match: (Lsh8x64 _ (MOVDconst [c]))
// cond: uint64(c) >= 8
// result: (MOVDconst [0])
for {
_ = v.Args[1]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) >= 8) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = 0
return true
}
// match: (Lsh8x64 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
// result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
@@ -13358,7 +13214,7 @@ func rewriteValueARM64_OpLsh8x64_0(v *Value) bool {
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpConst64, t)
v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v1.AuxInt = 0
v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -13375,7 +13231,7 @@ func rewriteValueARM64_OpLsh8x8_0(v *Value) bool {
_ = typ
// match: (Lsh8x8 <t> x y)
// cond:
// result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
// result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -13388,7 +13244,7 @@ func rewriteValueARM64_OpLsh8x8_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14408,7 +14264,7 @@ func rewriteValueARM64_OpRsh16Ux16_0(v *Value) bool {
_ = typ
// match: (Rsh16Ux16 <t> x y)
// cond:
// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -14423,7 +14279,7 @@ func rewriteValueARM64_OpRsh16Ux16_0(v *Value) bool {
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
v3 := b.NewValue0(v.Pos, OpConst64, t)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14442,7 +14298,7 @@ func rewriteValueARM64_OpRsh16Ux32_0(v *Value) bool {
_ = typ
// match: (Rsh16Ux32 <t> x y)
// cond:
// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -14457,7 +14313,7 @@ func rewriteValueARM64_OpRsh16Ux32_0(v *Value) bool {
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
v3 := b.NewValue0(v.Pos, OpConst64, t)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14474,47 +14330,9 @@ func rewriteValueARM64_OpRsh16Ux64_0(v *Value) bool {
_ = b
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh16Ux64 x (MOVDconst [c]))
// cond: uint64(c) < 16
// result: (SRLconst (ZeroExt16to64 x) [c])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) < 16) {
break
}
v.reset(OpARM64SRLconst)
v.AuxInt = c
v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (Rsh16Ux64 _ (MOVDconst [c]))
// cond: uint64(c) >= 16
// result: (MOVDconst [0])
for {
_ = v.Args[1]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) >= 16) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = 0
return true
}
// match: (Rsh16Ux64 <t> x y)
// cond:
// result: (CSELULT (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
// result: (CSELULT (SRL <t> (ZeroExt16to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
@@ -14527,7 +14345,7 @@ func rewriteValueARM64_OpRsh16Ux64_0(v *Value) bool {
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14544,7 +14362,7 @@ func rewriteValueARM64_OpRsh16Ux8_0(v *Value) bool {
_ = typ
// match: (Rsh16Ux8 <t> x y)
// cond:
// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
// result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -14559,7 +14377,7 @@ func rewriteValueARM64_OpRsh16Ux8_0(v *Value) bool {
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
v3 := b.NewValue0(v.Pos, OpConst64, t)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14578,7 +14396,7 @@ func rewriteValueARM64_OpRsh16x16_0(v *Value) bool {
_ = typ
// match: (Rsh16x16 x y)
// cond:
// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -14591,7 +14409,7 @@ func rewriteValueARM64_OpRsh16x16_0(v *Value) bool {
v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14611,7 +14429,7 @@ func rewriteValueARM64_OpRsh16x32_0(v *Value) bool {
_ = typ
// match: (Rsh16x32 x y)
// cond:
// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -14624,7 +14442,7 @@ func rewriteValueARM64_OpRsh16x32_0(v *Value) bool {
v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14642,51 +14460,9 @@ func rewriteValueARM64_OpRsh16x64_0(v *Value) bool {
_ = b
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh16x64 x (MOVDconst [c]))
// cond: uint64(c) < 16
// result: (SRAconst (SignExt16to64 x) [c])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) < 16) {
break
}
v.reset(OpARM64SRAconst)
v.AuxInt = c
v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (Rsh16x64 x (MOVDconst [c]))
// cond: uint64(c) >= 16
// result: (SRAconst (SignExt16to64 x) [63])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) >= 16) {
break
}
v.reset(OpARM64SRAconst)
v.AuxInt = 63
v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (Rsh16x64 x y)
// cond:
// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -14697,7 +14473,7 @@ func rewriteValueARM64_OpRsh16x64_0(v *Value) bool {
v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
v1.AddArg(y)
v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v2.AuxInt = 63
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14715,7 +14491,7 @@ func rewriteValueARM64_OpRsh16x8_0(v *Value) bool {
_ = typ
// match: (Rsh16x8 x y)
// cond:
// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
// result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -14728,7 +14504,7 @@ func rewriteValueARM64_OpRsh16x8_0(v *Value) bool {
v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14748,7 +14524,7 @@ func rewriteValueARM64_OpRsh32Ux16_0(v *Value) bool {
_ = typ
// match: (Rsh32Ux16 <t> x y)
// cond:
// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -14763,7 +14539,7 @@ func rewriteValueARM64_OpRsh32Ux16_0(v *Value) bool {
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
v3 := b.NewValue0(v.Pos, OpConst64, t)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14782,7 +14558,7 @@ func rewriteValueARM64_OpRsh32Ux32_0(v *Value) bool {
_ = typ
// match: (Rsh32Ux32 <t> x y)
// cond:
// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -14797,7 +14573,7 @@ func rewriteValueARM64_OpRsh32Ux32_0(v *Value) bool {
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
v3 := b.NewValue0(v.Pos, OpConst64, t)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14814,47 +14590,9 @@ func rewriteValueARM64_OpRsh32Ux64_0(v *Value) bool {
_ = b
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh32Ux64 x (MOVDconst [c]))
// cond: uint64(c) < 32
// result: (SRLconst (ZeroExt32to64 x) [c])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) < 32) {
break
}
v.reset(OpARM64SRLconst)
v.AuxInt = c
v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (Rsh32Ux64 _ (MOVDconst [c]))
// cond: uint64(c) >= 32
// result: (MOVDconst [0])
for {
_ = v.Args[1]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) >= 32) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = 0
return true
}
// match: (Rsh32Ux64 <t> x y)
// cond:
// result: (CSELULT (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
// result: (CSELULT (SRL <t> (ZeroExt32to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
@@ -14867,7 +14605,7 @@ func rewriteValueARM64_OpRsh32Ux64_0(v *Value) bool {
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14884,7 +14622,7 @@ func rewriteValueARM64_OpRsh32Ux8_0(v *Value) bool {
_ = typ
// match: (Rsh32Ux8 <t> x y)
// cond:
// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
// result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -14899,7 +14637,7 @@ func rewriteValueARM64_OpRsh32Ux8_0(v *Value) bool {
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
v3 := b.NewValue0(v.Pos, OpConst64, t)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14918,7 +14656,7 @@ func rewriteValueARM64_OpRsh32x16_0(v *Value) bool {
_ = typ
// match: (Rsh32x16 x y)
// cond:
// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -14931,7 +14669,7 @@ func rewriteValueARM64_OpRsh32x16_0(v *Value) bool {
v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14951,7 +14689,7 @@ func rewriteValueARM64_OpRsh32x32_0(v *Value) bool {
_ = typ
// match: (Rsh32x32 x y)
// cond:
// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -14964,7 +14702,7 @@ func rewriteValueARM64_OpRsh32x32_0(v *Value) bool {
v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -14982,51 +14720,9 @@ func rewriteValueARM64_OpRsh32x64_0(v *Value) bool {
_ = b
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh32x64 x (MOVDconst [c]))
// cond: uint64(c) < 32
// result: (SRAconst (SignExt32to64 x) [c])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) < 32) {
break
}
v.reset(OpARM64SRAconst)
v.AuxInt = c
v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (Rsh32x64 x (MOVDconst [c]))
// cond: uint64(c) >= 32
// result: (SRAconst (SignExt32to64 x) [63])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) >= 32) {
break
}
v.reset(OpARM64SRAconst)
v.AuxInt = 63
v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (Rsh32x64 x y)
// cond:
// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -15037,7 +14733,7 @@ func rewriteValueARM64_OpRsh32x64_0(v *Value) bool {
v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
v1.AddArg(y)
v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v2.AuxInt = 63
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -15055,7 +14751,7 @@ func rewriteValueARM64_OpRsh32x8_0(v *Value) bool {
_ = typ
// match: (Rsh32x8 x y)
// cond:
// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
// result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
@@ -15068,7 +14764,7 @@ func rewriteValueARM64_OpRsh32x8_0(v *Value) bool {
v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -15088,7 +14784,7 @@ func rewriteValueARM64_OpRsh64Ux16_0(v *Value) bool {
_ = typ
// match: (Rsh64Ux16 <t> x y)
// cond:
// result: (CSELULT (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
// result: (CSELULT (SRL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -15101,7 +14797,7 @@ func rewriteValueARM64_OpRsh64Ux16_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -15120,7 +14816,7 @@ func rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool {
_ = typ
// match: (Rsh64Ux32 <t> x y)
// cond:
// result: (CSELULT (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
// result: (CSELULT (SRL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
@@ -15133,7 +14829,7 @@ func rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -15148,45 +14844,9 @@ func rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool {
func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool {
b := v.Block
_ = b
// match: (Rsh64Ux64 x (MOVDconst [c]))
// cond: uint64(c) < 64
// result: (SRLconst x [c])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) < 64) {
break
}
v.reset(OpARM64SRLconst)
v.AuxInt = c
v.AddArg(x)
return true
}
// match: (Rsh64Ux64 _ (MOVDconst [c]))
// cond: uint64(c) >= 64
// result: (MOVDconst [0])
for {
_ = v.Args[1]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) >= 64) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = 0
return true
}
// match: (Rsh64Ux64 <t> x y)
// cond:
// result: (CSELULT (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
// result: (CSELULT (SRL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
@@ -15197,7 +14857,7 @@ func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool {
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpConst64, t)
v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v1.AuxInt = 0
v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
@@ -15214,7 +14874,7 @@ func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool {
_ = typ
// match: (Rsh64Ux8 <t> x y)
// cond:
// result: (CSELULT (SRL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
// result: (CSELULT (SRL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -15227,7 +14887,7 @@ func rewriteValueARM64_OpRsh64Ux8_0(v *Value) bool {
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......@@ -15246,7 +14906,7 @@ func rewriteValueARM64_OpRsh64x16_0(v *Value) bool {
_ = typ
// match: (Rsh64x16 x y)
// cond:
// result: (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
// result: (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
......@@ -15257,7 +14917,7 @@ func rewriteValueARM64_OpRsh64x16_0(v *Value) bool {
v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v1.AddArg(y)
v0.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v2.AuxInt = 63
v0.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......@@ -15277,7 +14937,7 @@ func rewriteValueARM64_OpRsh64x32_0(v *Value) bool {
_ = typ
// match: (Rsh64x32 x y)
// cond:
// result: (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
// result: (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
......@@ -15288,7 +14948,7 @@ func rewriteValueARM64_OpRsh64x32_0(v *Value) bool {
v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v1.AddArg(y)
v0.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v2.AuxInt = 63
v0.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......@@ -15304,47 +14964,9 @@ func rewriteValueARM64_OpRsh64x32_0(v *Value) bool {
func rewriteValueARM64_OpRsh64x64_0(v *Value) bool {
b := v.Block
_ = b
// match: (Rsh64x64 x (MOVDconst [c]))
// cond: uint64(c) < 64
// result: (SRAconst x [c])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) < 64) {
break
}
v.reset(OpARM64SRAconst)
v.AuxInt = c
v.AddArg(x)
return true
}
// match: (Rsh64x64 x (MOVDconst [c]))
// cond: uint64(c) >= 64
// result: (SRAconst x [63])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) >= 64) {
break
}
v.reset(OpARM64SRAconst)
v.AuxInt = 63
v.AddArg(x)
return true
}
// match: (Rsh64x64 x y)
// cond:
// result: (SRA x (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
// result: (SRA x (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
for {
_ = v.Args[1]
x := v.Args[0]
......@@ -15353,7 +14975,7 @@ func rewriteValueARM64_OpRsh64x64_0(v *Value) bool {
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
v0.AddArg(y)
v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v1.AuxInt = 63
v0.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......@@ -15371,7 +14993,7 @@ func rewriteValueARM64_OpRsh64x8_0(v *Value) bool {
_ = typ
// match: (Rsh64x8 x y)
// cond:
// result: (SRA x (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
// result: (SRA x (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
......@@ -15382,7 +15004,7 @@ func rewriteValueARM64_OpRsh64x8_0(v *Value) bool {
v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v1.AddArg(y)
v0.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v2.AuxInt = 63
v0.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......@@ -15402,7 +15024,7 @@ func rewriteValueARM64_OpRsh8Ux16_0(v *Value) bool {
_ = typ
// match: (Rsh8Ux16 <t> x y)
// cond:
// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -15417,7 +15039,7 @@ func rewriteValueARM64_OpRsh8Ux16_0(v *Value) bool {
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
v3 := b.NewValue0(v.Pos, OpConst64, t)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......@@ -15436,7 +15058,7 @@ func rewriteValueARM64_OpRsh8Ux32_0(v *Value) bool {
_ = typ
// match: (Rsh8Ux32 <t> x y)
// cond:
// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -15451,7 +15073,7 @@ func rewriteValueARM64_OpRsh8Ux32_0(v *Value) bool {
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
v3 := b.NewValue0(v.Pos, OpConst64, t)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......@@ -15468,47 +15090,9 @@ func rewriteValueARM64_OpRsh8Ux64_0(v *Value) bool {
_ = b
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh8Ux64 x (MOVDconst [c]))
// cond: uint64(c) < 8
// result: (SRLconst (ZeroExt8to64 x) [c])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) < 8) {
break
}
v.reset(OpARM64SRLconst)
v.AuxInt = c
v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (Rsh8Ux64 _ (MOVDconst [c]))
// cond: uint64(c) >= 8
// result: (MOVDconst [0])
for {
_ = v.Args[1]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) >= 8) {
break
}
v.reset(OpARM64MOVDconst)
v.AuxInt = 0
return true
}
// match: (Rsh8Ux64 <t> x y)
// cond:
// result: (CSELULT (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
// result: (CSELULT (SRL <t> (ZeroExt8to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
......@@ -15521,7 +15105,7 @@ func rewriteValueARM64_OpRsh8Ux64_0(v *Value) bool {
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
v2 := b.NewValue0(v.Pos, OpConst64, t)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......@@ -15538,7 +15122,7 @@ func rewriteValueARM64_OpRsh8Ux8_0(v *Value) bool {
_ = typ
// match: (Rsh8Ux8 <t> x y)
// cond:
// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
// result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -15553,7 +15137,7 @@ func rewriteValueARM64_OpRsh8Ux8_0(v *Value) bool {
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
v3 := b.NewValue0(v.Pos, OpConst64, t)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......@@ -15572,7 +15156,7 @@ func rewriteValueARM64_OpRsh8x16_0(v *Value) bool {
_ = typ
// match: (Rsh8x16 x y)
// cond:
// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
......@@ -15585,7 +15169,7 @@ func rewriteValueARM64_OpRsh8x16_0(v *Value) bool {
v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......@@ -15605,7 +15189,7 @@ func rewriteValueARM64_OpRsh8x32_0(v *Value) bool {
_ = typ
// match: (Rsh8x32 x y)
// cond:
// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
......@@ -15618,7 +15202,7 @@ func rewriteValueARM64_OpRsh8x32_0(v *Value) bool {
v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......@@ -15636,51 +15220,9 @@ func rewriteValueARM64_OpRsh8x64_0(v *Value) bool {
_ = b
typ := &b.Func.Config.Types
_ = typ
// match: (Rsh8x64 x (MOVDconst [c]))
// cond: uint64(c) < 8
// result: (SRAconst (SignExt8to64 x) [c])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) < 8) {
break
}
v.reset(OpARM64SRAconst)
v.AuxInt = c
v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (Rsh8x64 x (MOVDconst [c]))
// cond: uint64(c) >= 8
// result: (SRAconst (SignExt8to64 x) [63])
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
if v_1.Op != OpARM64MOVDconst {
break
}
c := v_1.AuxInt
if !(uint64(c) >= 8) {
break
}
v.reset(OpARM64SRAconst)
v.AuxInt = 63
v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
v0.AddArg(x)
v.AddArg(v0)
return true
}
// match: (Rsh8x64 x y)
// cond:
// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
for {
_ = v.Args[1]
x := v.Args[0]
......@@ -15691,7 +15233,7 @@ func rewriteValueARM64_OpRsh8x64_0(v *Value) bool {
v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
v1.AddArg(y)
v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v2.AuxInt = 63
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......@@ -15709,7 +15251,7 @@ func rewriteValueARM64_OpRsh8x8_0(v *Value) bool {
_ = typ
// match: (Rsh8x8 x y)
// cond:
// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
// result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
......@@ -15722,7 +15264,7 @@ func rewriteValueARM64_OpRsh8x8_0(v *Value) bool {
v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
......
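Every ARM64 hunk above makes the same two-part change: the dedicated (MOVDconst [c]) shift matches are deleted, and the generic rules now materialize their 0/63 constants as the machine op MOVDconst rather than the generic Const64, so the machine-level constant-folding rules can see them. Below is a minimal sketch of why no comparison survives for a constant shift count, assuming folding rules along the lines of (CMPconst [64] (MOVDconst [c])) evaluating to a constant flag, CSELULT on a constant flag selecting a single arm, and (SRL x (MOVDconst [c])) lowering to SRLconst; foldRsh64Ux64 is illustrative only, not a function in cmd/compile:

func foldRsh64Ux64(x uint64, c uint64) uint64 {
	// Generic rule: (Rsh64Ux64 x y) -> (CSELULT (SRL x y) (MOVDconst [0]) (CMPconst [64] y)).
	// With y = MOVDconst [c], the CMPconst folds to a known flag and
	// CSELULT collapses to one arm at compile time:
	if c >= 64 {
		return 0 // the MOVDconst [0] arm
	}
	return x >> c // SRL by a constant becomes SRLconst: a single LSR
}

The signed variants clamp instead of zeroing: for Rsh64x64 the CSELULT chooses between y and MOVDconst [63], which should fold the same way, leaving SRAconst x [63] for out-of-range counts.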
......@@ -2699,7 +2699,7 @@ func rewriteValueMIPS64_OpLsh16x16_0(v *Value) bool {
_ = typ
// match: (Lsh16x16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -2708,7 +2708,7 @@ func rewriteValueMIPS64_OpLsh16x16_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
......@@ -2732,7 +2732,7 @@ func rewriteValueMIPS64_OpLsh16x32_0(v *Value) bool {
_ = typ
// match: (Lsh16x32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -2741,7 +2741,7 @@ func rewriteValueMIPS64_OpLsh16x32_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
......@@ -2765,7 +2765,7 @@ func rewriteValueMIPS64_OpLsh16x64_0(v *Value) bool {
_ = typ
// match: (Lsh16x64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
for {
t := v.Type
_ = v.Args[1]
......@@ -2774,7 +2774,7 @@ func rewriteValueMIPS64_OpLsh16x64_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
......@@ -2794,7 +2794,7 @@ func rewriteValueMIPS64_OpLsh16x8_0(v *Value) bool {
_ = typ
// match: (Lsh16x8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -2803,7 +2803,7 @@ func rewriteValueMIPS64_OpLsh16x8_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
......@@ -2827,7 +2827,7 @@ func rewriteValueMIPS64_OpLsh32x16_0(v *Value) bool {
_ = typ
// match: (Lsh32x16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -2836,7 +2836,7 @@ func rewriteValueMIPS64_OpLsh32x16_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
......@@ -2860,7 +2860,7 @@ func rewriteValueMIPS64_OpLsh32x32_0(v *Value) bool {
_ = typ
// match: (Lsh32x32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -2869,7 +2869,7 @@ func rewriteValueMIPS64_OpLsh32x32_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
......@@ -2893,7 +2893,7 @@ func rewriteValueMIPS64_OpLsh32x64_0(v *Value) bool {
_ = typ
// match: (Lsh32x64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
for {
t := v.Type
_ = v.Args[1]
......@@ -2902,7 +2902,7 @@ func rewriteValueMIPS64_OpLsh32x64_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
......@@ -2922,7 +2922,7 @@ func rewriteValueMIPS64_OpLsh32x8_0(v *Value) bool {
_ = typ
// match: (Lsh32x8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -2931,7 +2931,7 @@ func rewriteValueMIPS64_OpLsh32x8_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
......@@ -2955,7 +2955,7 @@ func rewriteValueMIPS64_OpLsh64x16_0(v *Value) bool {
_ = typ
// match: (Lsh64x16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -2964,7 +2964,7 @@ func rewriteValueMIPS64_OpLsh64x16_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
......@@ -2988,7 +2988,7 @@ func rewriteValueMIPS64_OpLsh64x32_0(v *Value) bool {
_ = typ
// match: (Lsh64x32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -2997,7 +2997,7 @@ func rewriteValueMIPS64_OpLsh64x32_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
......@@ -3021,7 +3021,7 @@ func rewriteValueMIPS64_OpLsh64x64_0(v *Value) bool {
_ = typ
// match: (Lsh64x64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
for {
t := v.Type
_ = v.Args[1]
......@@ -3030,7 +3030,7 @@ func rewriteValueMIPS64_OpLsh64x64_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
......@@ -3050,7 +3050,7 @@ func rewriteValueMIPS64_OpLsh64x8_0(v *Value) bool {
_ = typ
// match: (Lsh64x8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -3059,7 +3059,7 @@ func rewriteValueMIPS64_OpLsh64x8_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
......@@ -3083,7 +3083,7 @@ func rewriteValueMIPS64_OpLsh8x16_0(v *Value) bool {
_ = typ
// match: (Lsh8x16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -3092,7 +3092,7 @@ func rewriteValueMIPS64_OpLsh8x16_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
......@@ -3116,7 +3116,7 @@ func rewriteValueMIPS64_OpLsh8x32_0(v *Value) bool {
_ = typ
// match: (Lsh8x32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -3125,7 +3125,7 @@ func rewriteValueMIPS64_OpLsh8x32_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
......@@ -3149,7 +3149,7 @@ func rewriteValueMIPS64_OpLsh8x64_0(v *Value) bool {
_ = typ
// match: (Lsh8x64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
for {
t := v.Type
_ = v.Args[1]
......@@ -3158,7 +3158,7 @@ func rewriteValueMIPS64_OpLsh8x64_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
......@@ -3178,7 +3178,7 @@ func rewriteValueMIPS64_OpLsh8x8_0(v *Value) bool {
_ = typ
// match: (Lsh8x8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -3187,7 +3187,7 @@ func rewriteValueMIPS64_OpLsh8x8_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
......@@ -7521,7 +7521,7 @@ func rewriteValueMIPS64_OpRsh16Ux16_0(v *Value) bool {
_ = typ
// match: (Rsh16Ux16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -7530,7 +7530,7 @@ func rewriteValueMIPS64_OpRsh16Ux16_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
......@@ -7556,7 +7556,7 @@ func rewriteValueMIPS64_OpRsh16Ux32_0(v *Value) bool {
_ = typ
// match: (Rsh16Ux32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -7565,7 +7565,7 @@ func rewriteValueMIPS64_OpRsh16Ux32_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
......@@ -7591,7 +7591,7 @@ func rewriteValueMIPS64_OpRsh16Ux64_0(v *Value) bool {
_ = typ
// match: (Rsh16Ux64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
for {
t := v.Type
_ = v.Args[1]
......@@ -7600,7 +7600,7 @@ func rewriteValueMIPS64_OpRsh16Ux64_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
......@@ -7622,7 +7622,7 @@ func rewriteValueMIPS64_OpRsh16Ux8_0(v *Value) bool {
_ = typ
// match: (Rsh16Ux8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -7631,7 +7631,7 @@ func rewriteValueMIPS64_OpRsh16Ux8_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
......@@ -7657,7 +7657,7 @@ func rewriteValueMIPS64_OpRsh16x16_0(v *Value) bool {
_ = typ
// match: (Rsh16x16 <t> x y)
// cond:
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -7673,7 +7673,7 @@ func rewriteValueMIPS64_OpRsh16x16_0(v *Value) bool {
v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
......@@ -7692,7 +7692,7 @@ func rewriteValueMIPS64_OpRsh16x32_0(v *Value) bool {
_ = typ
// match: (Rsh16x32 <t> x y)
// cond:
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -7708,7 +7708,7 @@ func rewriteValueMIPS64_OpRsh16x32_0(v *Value) bool {
v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
......@@ -7727,7 +7727,7 @@ func rewriteValueMIPS64_OpRsh16x64_0(v *Value) bool {
_ = typ
// match: (Rsh16x64 <t> x y)
// cond:
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
for {
t := v.Type
_ = v.Args[1]
......@@ -7741,7 +7741,7 @@ func rewriteValueMIPS64_OpRsh16x64_0(v *Value) bool {
v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v3.AddArg(y)
v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v4.AuxInt = 63
v3.AddArg(v4)
v2.AddArg(v3)
......@@ -7758,7 +7758,7 @@ func rewriteValueMIPS64_OpRsh16x8_0(v *Value) bool {
_ = typ
// match: (Rsh16x8 <t> x y)
// cond:
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -7774,7 +7774,7 @@ func rewriteValueMIPS64_OpRsh16x8_0(v *Value) bool {
v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
......@@ -7793,7 +7793,7 @@ func rewriteValueMIPS64_OpRsh32Ux16_0(v *Value) bool {
_ = typ
// match: (Rsh32Ux16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -7802,7 +7802,7 @@ func rewriteValueMIPS64_OpRsh32Ux16_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
......@@ -7828,7 +7828,7 @@ func rewriteValueMIPS64_OpRsh32Ux32_0(v *Value) bool {
_ = typ
// match: (Rsh32Ux32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -7837,7 +7837,7 @@ func rewriteValueMIPS64_OpRsh32Ux32_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
......@@ -7863,7 +7863,7 @@ func rewriteValueMIPS64_OpRsh32Ux64_0(v *Value) bool {
_ = typ
// match: (Rsh32Ux64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
for {
t := v.Type
_ = v.Args[1]
......@@ -7872,7 +7872,7 @@ func rewriteValueMIPS64_OpRsh32Ux64_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
......@@ -7894,7 +7894,7 @@ func rewriteValueMIPS64_OpRsh32Ux8_0(v *Value) bool {
_ = typ
// match: (Rsh32Ux8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -7903,7 +7903,7 @@ func rewriteValueMIPS64_OpRsh32Ux8_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
......@@ -7929,7 +7929,7 @@ func rewriteValueMIPS64_OpRsh32x16_0(v *Value) bool {
_ = typ
// match: (Rsh32x16 <t> x y)
// cond:
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -7945,7 +7945,7 @@ func rewriteValueMIPS64_OpRsh32x16_0(v *Value) bool {
v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
......@@ -7964,7 +7964,7 @@ func rewriteValueMIPS64_OpRsh32x32_0(v *Value) bool {
_ = typ
// match: (Rsh32x32 <t> x y)
// cond:
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -7980,7 +7980,7 @@ func rewriteValueMIPS64_OpRsh32x32_0(v *Value) bool {
v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
......@@ -7999,7 +7999,7 @@ func rewriteValueMIPS64_OpRsh32x64_0(v *Value) bool {
_ = typ
// match: (Rsh32x64 <t> x y)
// cond:
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
for {
t := v.Type
_ = v.Args[1]
......@@ -8013,7 +8013,7 @@ func rewriteValueMIPS64_OpRsh32x64_0(v *Value) bool {
v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v3.AddArg(y)
v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v4.AuxInt = 63
v3.AddArg(v4)
v2.AddArg(v3)
......@@ -8030,7 +8030,7 @@ func rewriteValueMIPS64_OpRsh32x8_0(v *Value) bool {
_ = typ
// match: (Rsh32x8 <t> x y)
// cond:
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8046,7 +8046,7 @@ func rewriteValueMIPS64_OpRsh32x8_0(v *Value) bool {
v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
......@@ -8065,7 +8065,7 @@ func rewriteValueMIPS64_OpRsh64Ux16_0(v *Value) bool {
_ = typ
// match: (Rsh64Ux16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8074,7 +8074,7 @@ func rewriteValueMIPS64_OpRsh64Ux16_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
......@@ -8098,7 +8098,7 @@ func rewriteValueMIPS64_OpRsh64Ux32_0(v *Value) bool {
_ = typ
// match: (Rsh64Ux32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8107,7 +8107,7 @@ func rewriteValueMIPS64_OpRsh64Ux32_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
......@@ -8131,7 +8131,7 @@ func rewriteValueMIPS64_OpRsh64Ux64_0(v *Value) bool {
_ = typ
// match: (Rsh64Ux64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> x y))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
for {
t := v.Type
_ = v.Args[1]
......@@ -8140,7 +8140,7 @@ func rewriteValueMIPS64_OpRsh64Ux64_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
......@@ -8160,7 +8160,7 @@ func rewriteValueMIPS64_OpRsh64Ux8_0(v *Value) bool {
_ = typ
// match: (Rsh64Ux8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8169,7 +8169,7 @@ func rewriteValueMIPS64_OpRsh64Ux8_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
......@@ -8193,7 +8193,7 @@ func rewriteValueMIPS64_OpRsh64x16_0(v *Value) bool {
_ = typ
// match: (Rsh64x16 <t> x y)
// cond:
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8207,7 +8207,7 @@ func rewriteValueMIPS64_OpRsh64x16_0(v *Value) bool {
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v4.AuxInt = 63
v2.AddArg(v4)
v1.AddArg(v2)
......@@ -8226,7 +8226,7 @@ func rewriteValueMIPS64_OpRsh64x32_0(v *Value) bool {
_ = typ
// match: (Rsh64x32 <t> x y)
// cond:
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8240,7 +8240,7 @@ func rewriteValueMIPS64_OpRsh64x32_0(v *Value) bool {
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v4.AuxInt = 63
v2.AddArg(v4)
v1.AddArg(v2)
......@@ -8259,7 +8259,7 @@ func rewriteValueMIPS64_OpRsh64x64_0(v *Value) bool {
_ = typ
// match: (Rsh64x64 <t> x y)
// cond:
// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
for {
t := v.Type
_ = v.Args[1]
......@@ -8271,7 +8271,7 @@ func rewriteValueMIPS64_OpRsh64x64_0(v *Value) bool {
v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2.AddArg(y)
v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v3.AuxInt = 63
v2.AddArg(v3)
v1.AddArg(v2)
......@@ -8288,7 +8288,7 @@ func rewriteValueMIPS64_OpRsh64x8_0(v *Value) bool {
_ = typ
// match: (Rsh64x8 <t> x y)
// cond:
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8302,7 +8302,7 @@ func rewriteValueMIPS64_OpRsh64x8_0(v *Value) bool {
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v4.AuxInt = 63
v2.AddArg(v4)
v1.AddArg(v2)
......@@ -8321,7 +8321,7 @@ func rewriteValueMIPS64_OpRsh8Ux16_0(v *Value) bool {
_ = typ
// match: (Rsh8Ux16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8330,7 +8330,7 @@ func rewriteValueMIPS64_OpRsh8Ux16_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
......@@ -8356,7 +8356,7 @@ func rewriteValueMIPS64_OpRsh8Ux32_0(v *Value) bool {
_ = typ
// match: (Rsh8Ux32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8365,7 +8365,7 @@ func rewriteValueMIPS64_OpRsh8Ux32_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
......@@ -8391,7 +8391,7 @@ func rewriteValueMIPS64_OpRsh8Ux64_0(v *Value) bool {
_ = typ
// match: (Rsh8Ux64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
for {
t := v.Type
_ = v.Args[1]
......@@ -8400,7 +8400,7 @@ func rewriteValueMIPS64_OpRsh8Ux64_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
......@@ -8422,7 +8422,7 @@ func rewriteValueMIPS64_OpRsh8Ux8_0(v *Value) bool {
_ = typ
// match: (Rsh8Ux8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8431,7 +8431,7 @@ func rewriteValueMIPS64_OpRsh8Ux8_0(v *Value) bool {
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
......@@ -8457,7 +8457,7 @@ func rewriteValueMIPS64_OpRsh8x16_0(v *Value) bool {
_ = typ
// match: (Rsh8x16 <t> x y)
// cond:
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8473,7 +8473,7 @@ func rewriteValueMIPS64_OpRsh8x16_0(v *Value) bool {
v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
......@@ -8492,7 +8492,7 @@ func rewriteValueMIPS64_OpRsh8x32_0(v *Value) bool {
_ = typ
// match: (Rsh8x32 <t> x y)
// cond:
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8508,7 +8508,7 @@ func rewriteValueMIPS64_OpRsh8x32_0(v *Value) bool {
v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
......@@ -8527,7 +8527,7 @@ func rewriteValueMIPS64_OpRsh8x64_0(v *Value) bool {
_ = typ
// match: (Rsh8x64 <t> x y)
// cond:
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
for {
t := v.Type
_ = v.Args[1]
......@@ -8541,7 +8541,7 @@ func rewriteValueMIPS64_OpRsh8x64_0(v *Value) bool {
v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v3.AddArg(y)
v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v4.AuxInt = 63
v3.AddArg(v4)
v2.AddArg(v3)
......@@ -8558,7 +8558,7 @@ func rewriteValueMIPS64_OpRsh8x8_0(v *Value) bool {
_ = typ
// match: (Rsh8x8 <t> x y)
// cond:
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
......@@ -8574,7 +8574,7 @@ func rewriteValueMIPS64_OpRsh8x8_0(v *Value) bool {
v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
......
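The MIPS64 hunks make the identical Const64 -> MOVVconst substitution, but there the bounds check is arithmetic rather than a conditional select: SGTU (MOVVconst [64]) y produces 0 or 1, NEGV stretches that into an all-ones or all-zeros mask, and AND applies the mask to the shift result. A sketch of that arithmetic (maskShift is illustrative only, not a compiler function):

func maskShift(x, y uint64) uint64 {
	var inRange uint64
	if 64 > y { // SGTU (MOVVconst [64]) y
		inRange = 1
	}
	mask := -inRange         // NEGV: all ones if y < 64, else zero
	shifted := x << (y & 63) // SLLV uses only the low six bits of y
	return mask & shifted    // AND zeroes the result for y >= 64
}

When y is itself a MOVVconst, the SGTU of two constants, the NEGV of a constant, and the AND with an all-ones or all-zeros mask should all fold away, leaving a bare shift with an immediate and no SGT in the generated code.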