// Unsigned shifts need to return 0 if shift amount is >= width of shifted value.
(Lsh64x64 x y) && shiftIsBounded(v) -> (I64Shl x y)
+(Lsh64x64 x (I64Const [c])) && uint64(c) < 64 -> (I64Shl x (I64Const [c]))
+(Lsh64x64 x (I64Const [c])) && uint64(c) >= 64 -> (I64Const [0])
(Lsh64x64 x y) -> (Select (I64Shl x y) (I64Const [0]) (I64LtU y (I64Const [64])))
(Lsh64x(32|16|8) x y) -> (Lsh64x64 x (ZeroExt(32|16|8)to64 y))
(Lsh8x(32|16|8) x y) -> (Lsh64x64 x (ZeroExt(32|16|8)to64 y))
(Rsh64Ux64 x y) && shiftIsBounded(v) -> (I64ShrU x y)
+(Rsh64Ux64 x (I64Const [c])) && uint64(c) < 64 -> (I64ShrU x (I64Const [c]))
+(Rsh64Ux64 x (I64Const [c])) && uint64(c) >= 64 -> (I64Const [0])
(Rsh64Ux64 x y) -> (Select (I64ShrU x y) (I64Const [0]) (I64LtU y (I64Const [64])))
(Rsh64Ux(32|16|8) x y) -> (Rsh64Ux64 x (ZeroExt(32|16|8)to64 y))
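// Illustrative example: for a variable count s = 65, Go defines uint64(1)<<s == 0
// and uint64(8)>>s == 0, while Wasm's i64.shl/i64.shr_u use only the low 6 bits of
// the count, so a bare I64Shl would compute 1<<(65&63) == 2. The Select guards above
// keep the Go semantics for unknown counts; the added I64Const rules fold the guard
// away when the count is a known constant, emitting the plain shift for c < 64 and
// a constant 0 for c >= 64.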
// Signed right shifts need to return 0 or -1 if shift amount is >= width of shifted value.
// We implement this by setting the shift value to (width - 1) if the shift value is >= width.
(Rsh64x64 x y) && shiftIsBounded(v) -> (I64ShrS x y)
+(Rsh64x64 x (I64Const [c])) && uint64(c) < 64 -> (I64ShrS x (I64Const [c]))
+(Rsh64x64 x (I64Const [c])) && uint64(c) >= 64 -> (I64ShrS x (I64Const [63]))
(Rsh64x64 x y) -> (I64ShrS x (Select <typ.Int64> y (I64Const [63]) (I64LtU y (I64Const [64]))))
(Rsh64x(32|16|8) x y) -> (Rsh64x64 x (ZeroExt(32|16|8)to64 y))
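// Illustrative example: for a variable count s = 100, Go defines int64(-1)>>s == -1
// and int64(8)>>s == 0. An arithmetic shift by 63 already produces that all-sign-bits
// result, so clamping the count to 63 with Select (or substituting 63 directly when
// the constant count is >= 64, as in the added rules) preserves the Go semantics.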
v.AddArg(y)
return true
}
+ // match: (Lsh64x64 x (I64Const [c]))
+ // cond: uint64(c) < 64
+ // result: (I64Shl x (I64Const [c]))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpWasmI64Const {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 64) {
+ break
+ }
+ v.reset(OpWasmI64Shl)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+ v0.AuxInt = c
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Lsh64x64 x (I64Const [c]))
+ // cond: uint64(c) >= 64
+ // result: (I64Const [0])
+ for {
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpWasmI64Const {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 64) {
+ break
+ }
+ v.reset(OpWasmI64Const)
+ v.AuxInt = 0
+ return true
+ }
// match: (Lsh64x64 x y)
// cond:
// result: (Select (I64Shl x y) (I64Const [0]) (I64LtU y (I64Const [64])))
v.AddArg(y)
return true
}
+ // match: (Rsh64Ux64 x (I64Const [c]))
+ // cond: uint64(c) < 64
+ // result: (I64ShrU x (I64Const [c]))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpWasmI64Const {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 64) {
+ break
+ }
+ v.reset(OpWasmI64ShrU)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+ v0.AuxInt = c
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh64Ux64 x (I64Const [c]))
+ // cond: uint64(c) >= 64
+ // result: (I64Const [0])
+ for {
+ _ = v.Args[1]
+ v_1 := v.Args[1]
+ if v_1.Op != OpWasmI64Const {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 64) {
+ break
+ }
+ v.reset(OpWasmI64Const)
+ v.AuxInt = 0
+ return true
+ }
// match: (Rsh64Ux64 x y)
// cond:
// result: (Select (I64ShrU x y) (I64Const [0]) (I64LtU y (I64Const [64])))
v.AddArg(y)
return true
}
+ // match: (Rsh64x64 x (I64Const [c]))
+ // cond: uint64(c) < 64
+ // result: (I64ShrS x (I64Const [c]))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpWasmI64Const {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 64) {
+ break
+ }
+ v.reset(OpWasmI64ShrS)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+ v0.AuxInt = c
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh64x64 x (I64Const [c]))
+ // cond: uint64(c) >= 64
+ // result: (I64ShrS x (I64Const [63]))
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpWasmI64Const {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 64) {
+ break
+ }
+ v.reset(OpWasmI64ShrS)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+ v0.AuxInt = 63
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh64x64 x y)
// cond:
// result: (I64ShrS x (Select <typ.Int64> y (I64Const [63]) (I64LtU y (I64Const [64]))))