Add a bunch of tests for shifts.
Fix triple-shift rules to always take constant shift amounts as 64-bit (Const64) values.
(Rules earlier in the file already promote constant shift amounts to 64 bits.)
Add overflow checks so that the combined shift amount c1-c2+c3 cannot wrap around.
Increases generic rule coverage to 91%.
Change-Id: I6b42d368d19d36ac482dbb8e0d4f67e30ad7145d
Reviewed-on: https://go-review.googlesource.com/23555
Reviewed-by: Todd Neal <todd@tneal.org>
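To illustrate the overflow hazard the new checks guard against, here is
a minimal standalone sketch (not part of this change; it assumes uaddOvf
is the usual unsigned-add overflow helper from the ssa rewrite support
code). With c1 = c3 = 0x8000000000000001 and c2 = 2, as exercised by
TestShiftLargeCombine3 below, the combined shift amount c1-c2+c3 wraps
to 0, so an unchecked combine would rewrite 1<<c1>>c2<<c3 (which is 0,
since the first shift count is >= 64) into 1<<0 == 1:

    package main

    import "fmt"

    // uaddOvf reports whether uint64(a)+uint64(b) wraps around.
    // Reimplemented here only to illustrate the rule condition
    // !uaddOvf(c1-c2, c3); the compiler has its own copy.
    func uaddOvf(a, b int64) bool {
        return uint64(a)+uint64(b) < uint64(a)
    }

    func main() {
        // Shift amounts from TestShiftLargeCombine3:
        // ((x << c1) >> c2) << c3 with c1 = c3 = 0x8000000000000001, c2 = 2.
        var n uint64 = 0x8000000000000001
        c1, c2, c3 := int64(n), int64(2), int64(n)

        fmt.Println(uint64(c1-c2+c3))   // 0: the naive combined amount wraps
        fmt.Println(uaddOvf(c1-c2, c3)) // true: the combine must not fire
    }
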
--- /dev/null
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gc
+
+import "testing"
+
+// Tests shifts of zero.
+
+//go:noinline
+func ofz64l64(n uint64) int64 {
+ var x int64
+ return x << n
+}
+
+//go:noinline
+func ofz64l32(n uint32) int64 {
+ var x int64
+ return x << n
+}
+
+//go:noinline
+func ofz64l16(n uint16) int64 {
+ var x int64
+ return x << n
+}
+
+//go:noinline
+func ofz64l8(n uint8) int64 {
+ var x int64
+ return x << n
+}
+
+//go:noinline
+func ofz64r64(n uint64) int64 {
+ var x int64
+ return x >> n
+}
+
+//go:noinline
+func ofz64r32(n uint32) int64 {
+ var x int64
+ return x >> n
+}
+
+//go:noinline
+func ofz64r16(n uint16) int64 {
+ var x int64
+ return x >> n
+}
+
+//go:noinline
+func ofz64r8(n uint8) int64 {
+ var x int64
+ return x >> n
+}
+
+//go:noinline
+func ofz64ur64(n uint64) uint64 {
+ var x uint64
+ return x >> n
+}
+
+//go:noinline
+func ofz64ur32(n uint32) uint64 {
+ var x uint64
+ return x >> n
+}
+
+//go:noinline
+func ofz64ur16(n uint16) uint64 {
+ var x uint64
+ return x >> n
+}
+
+//go:noinline
+func ofz64ur8(n uint8) uint64 {
+ var x uint64
+ return x >> n
+}
+
+//go:noinline
+func ofz32l64(n uint64) int32 {
+ var x int32
+ return x << n
+}
+
+//go:noinline
+func ofz32l32(n uint32) int32 {
+ var x int32
+ return x << n
+}
+
+//go:noinline
+func ofz32l16(n uint16) int32 {
+ var x int32
+ return x << n
+}
+
+//go:noinline
+func ofz32l8(n uint8) int32 {
+ var x int32
+ return x << n
+}
+
+//go:noinline
+func ofz32r64(n uint64) int32 {
+ var x int32
+ return x >> n
+}
+
+//go:noinline
+func ofz32r32(n uint32) int32 {
+ var x int32
+ return x >> n
+}
+
+//go:noinline
+func ofz32r16(n uint16) int32 {
+ var x int32
+ return x >> n
+}
+
+//go:noinline
+func ofz32r8(n uint8) int32 {
+ var x int32
+ return x >> n
+}
+
+//go:noinline
+func ofz32ur64(n uint64) uint32 {
+ var x uint32
+ return x >> n
+}
+
+//go:noinline
+func ofz32ur32(n uint32) uint32 {
+ var x uint32
+ return x >> n
+}
+
+//go:noinline
+func ofz32ur16(n uint16) uint32 {
+ var x uint32
+ return x >> n
+}
+
+//go:noinline
+func ofz32ur8(n uint8) uint32 {
+ var x uint32
+ return x >> n
+}
+
+//go:noinline
+func ofz16l64(n uint64) int16 {
+ var x int16
+ return x << n
+}
+
+//go:noinline
+func ofz16l32(n uint32) int16 {
+ var x int16
+ return x << n
+}
+
+//go:noinline
+func ofz16l16(n uint16) int16 {
+ var x int16
+ return x << n
+}
+
+//go:noinline
+func ofz16l8(n uint8) int16 {
+ var x int16
+ return x << n
+}
+
+//go:noinline
+func ofz16r64(n uint64) int16 {
+ var x int16
+ return x >> n
+}
+
+//go:noinline
+func ofz16r32(n uint32) int16 {
+ var x int16
+ return x >> n
+}
+
+//go:noinline
+func ofz16r16(n uint16) int16 {
+ var x int16
+ return x >> n
+}
+
+//go:noinline
+func ofz16r8(n uint8) int16 {
+ var x int16
+ return x >> n
+}
+
+//go:noinline
+func ofz16ur64(n uint64) uint16 {
+ var x uint16
+ return x >> n
+}
+
+//go:noinline
+func ofz16ur32(n uint32) uint16 {
+ var x uint16
+ return x >> n
+}
+
+//go:noinline
+func ofz16ur16(n uint16) uint16 {
+ var x uint16
+ return x >> n
+}
+
+//go:noinline
+func ofz16ur8(n uint8) uint16 {
+ var x uint16
+ return x >> n
+}
+
+//go:noinline
+func ofz8l64(n uint64) int8 {
+ var x int8
+ return x << n
+}
+
+//go:noinline
+func ofz8l32(n uint32) int8 {
+ var x int8
+ return x << n
+}
+
+//go:noinline
+func ofz8l16(n uint16) int8 {
+ var x int8
+ return x << n
+}
+
+//go:noinline
+func ofz8l8(n uint8) int8 {
+ var x int8
+ return x << n
+}
+
+//go:noinline
+func ofz8r64(n uint64) int8 {
+ var x int8
+ return x >> n
+}
+
+//go:noinline
+func ofz8r32(n uint32) int8 {
+ var x int8
+ return x >> n
+}
+
+//go:noinline
+func ofz8r16(n uint16) int8 {
+ var x int8
+ return x >> n
+}
+
+//go:noinline
+func ofz8r8(n uint8) int8 {
+ var x int8
+ return x >> n
+}
+
+//go:noinline
+func ofz8ur64(n uint64) uint8 {
+ var x uint8
+ return x >> n
+}
+
+//go:noinline
+func ofz8ur32(n uint32) uint8 {
+ var x uint8
+ return x >> n
+}
+
+//go:noinline
+func ofz8ur16(n uint16) uint8 {
+ var x uint8
+ return x >> n
+}
+
+//go:noinline
+func ofz8ur8(n uint8) uint8 {
+ var x uint8
+ return x >> n
+}
+
+func TestShiftOfZero(t *testing.T) {
+ if got := ofz64l64(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz64l32(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz64l16(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz64l8(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz64r64(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz64r32(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz64r16(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz64r8(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz64ur64(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+ if got := ofz64ur32(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+ if got := ofz64ur16(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+ if got := ofz64ur8(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+
+ if got := ofz32l64(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz32l32(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz32l16(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz32l8(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz32r64(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz32r32(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz32r16(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz32r8(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz32ur64(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+ if got := ofz32ur32(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+ if got := ofz32ur16(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+ if got := ofz32ur8(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+
+ if got := ofz16l64(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz16l32(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz16l16(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz16l8(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz16r64(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz16r32(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz16r16(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz16r8(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz16ur64(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+ if got := ofz16ur32(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+ if got := ofz16ur16(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+ if got := ofz16ur8(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+
+ if got := ofz8l64(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz8l32(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz8l16(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz8l8(5); got != 0 {
+ t.Errorf("0<<5 == %d, want 0", got)
+ }
+ if got := ofz8r64(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz8r32(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz8r16(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz8r8(5); got != 0 {
+ t.Errorf("0>>5 == %d, want 0", got)
+ }
+ if got := ofz8ur64(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+ if got := ofz8ur32(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+ if got := ofz8ur16(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+ if got := ofz8ur8(5); got != 0 {
+ t.Errorf("0>>>5 == %d, want 0", got)
+ }
+}
+
+//go:noinline
+func byz64l(n int64) int64 {
+ return n << 0
+}
+
+//go:noinline
+func byz64r(n int64) int64 {
+ return n >> 0
+}
+
+//go:noinline
+func byz64ur(n uint64) uint64 {
+ return n >> 0
+}
+
+//go:noinline
+func byz32l(n int32) int32 {
+ return n << 0
+}
+
+//go:noinline
+func byz32r(n int32) int32 {
+ return n >> 0
+}
+
+//go:noinline
+func byz32ur(n uint32) uint32 {
+ return n >> 0
+}
+
+//go:noinline
+func byz16l(n int16) int16 {
+ return n << 0
+}
+
+//go:noinline
+func byz16r(n int16) int16 {
+ return n >> 0
+}
+
+//go:noinline
+func byz16ur(n uint16) uint16 {
+ return n >> 0
+}
+
+//go:noinline
+func byz8l(n int8) int8 {
+ return n << 0
+}
+
+//go:noinline
+func byz8r(n int8) int8 {
+ return n >> 0
+}
+
+//go:noinline
+func byz8ur(n uint8) uint8 {
+ return n >> 0
+}
+
+func TestShiftByZero(t *testing.T) {
+ {
+ var n int64 = 0x5555555555555555
+ if got := byz64l(n); got != n {
+ t.Errorf("%x<<0 == %x, want %x", n, got, n)
+ }
+ if got := byz64r(n); got != n {
+ t.Errorf("%x>>0 == %x, want %x", n, got, n)
+ }
+ }
+ {
+ var n uint64 = 0xaaaaaaaaaaaaaaaa
+ if got := byz64ur(n); got != n {
+ t.Errorf("%x>>>0 == %x, want %x", n, got, n)
+ }
+ }
+
+ {
+ var n int32 = 0x55555555
+ if got := byz32l(n); got != n {
+ t.Errorf("%x<<0 == %x, want %x", n, got, n)
+ }
+ if got := byz32r(n); got != n {
+ t.Errorf("%x>>0 == %x, want %x", n, got, n)
+ }
+ }
+ {
+ var n uint32 = 0xaaaaaaaa
+ if got := byz32ur(n); got != n {
+ t.Errorf("%x>>>0 == %x, want %x", n, got, n)
+ }
+ }
+
+ {
+ var n int16 = 0x5555
+ if got := byz16l(n); got != n {
+ t.Errorf("%x<<0 == %x, want %x", n, got, n)
+ }
+ if got := byz16r(n); got != n {
+ t.Errorf("%x>>0 == %x, want %x", n, got, n)
+ }
+ }
+ {
+ var n uint16 = 0xaaaa
+ if got := byz16ur(n); got != n {
+ t.Errorf("%x>>>0 == %x, want %x", n, got, n)
+ }
+ }
+
+ {
+ var n int8 = 0x55
+ if got := byz8l(n); got != n {
+ t.Errorf("%x<<0 == %x, want %x", n, got, n)
+ }
+ if got := byz8r(n); got != n {
+ t.Errorf("%x>>0 == %x, want %x", n, got, n)
+ }
+ }
+ {
+ var n uint8 = 0xaa
+ if got := byz8ur(n); got != n {
+ t.Errorf("%x>>>0 == %x, want %x", n, got, n)
+ }
+ }
+}
+
+//go:noinline
+func two64l(x int64) int64 {
+ return x << 1 << 1
+}
+
+//go:noinline
+func two64r(x int64) int64 {
+ return x >> 1 >> 1
+}
+
+//go:noinline
+func two64ur(x uint64) uint64 {
+ return x >> 1 >> 1
+}
+
+//go:noinline
+func two32l(x int32) int32 {
+ return x << 1 << 1
+}
+
+//go:noinline
+func two32r(x int32) int32 {
+ return x >> 1 >> 1
+}
+
+//go:noinline
+func two32ur(x uint32) uint32 {
+ return x >> 1 >> 1
+}
+
+//go:noinline
+func two16l(x int16) int16 {
+ return x << 1 << 1
+}
+
+//go:noinline
+func two16r(x int16) int16 {
+ return x >> 1 >> 1
+}
+
+//go:noinline
+func two16ur(x uint16) uint16 {
+ return x >> 1 >> 1
+}
+
+//go:noinline
+func two8l(x int8) int8 {
+ return x << 1 << 1
+}
+
+//go:noinline
+func two8r(x int8) int8 {
+ return x >> 1 >> 1
+}
+
+//go:noinline
+func two8ur(x uint8) uint8 {
+ return x >> 1 >> 1
+}
+
+func TestShiftCombine(t *testing.T) {
+ if got, want := two64l(4), int64(16); want != got {
+ t.Errorf("4<<1<<1 == %d, want %d", got, want)
+ }
+ if got, want := two64r(64), int64(16); want != got {
+ t.Errorf("64>>1>>1 == %d, want %d", got, want)
+ }
+ if got, want := two64ur(64), uint64(16); want != got {
+ t.Errorf("64>>1>>1 == %d, want %d", got, want)
+ }
+ if got, want := two32l(4), int32(16); want != got {
+ t.Errorf("4<<1<<1 == %d, want %d", got, want)
+ }
+ if got, want := two32r(64), int32(16); want != got {
+ t.Errorf("64>>1>>1 == %d, want %d", got, want)
+ }
+ if got, want := two32ur(64), uint32(16); want != got {
+ t.Errorf("64>>1>>1 == %d, want %d", got, want)
+ }
+ if got, want := two16l(4), int16(16); want != got {
+ t.Errorf("4<<1<<1 == %d, want %d", got, want)
+ }
+ if got, want := two16r(64), int16(16); want != got {
+ t.Errorf("64>>1>>1 == %d, want %d", got, want)
+ }
+ if got, want := two16ur(64), uint16(16); want != got {
+ t.Errorf("64>>1>>1 == %d, want %d", got, want)
+ }
+ if got, want := two8l(4), int8(16); want != got {
+ t.Errorf("4<<1<<1 == %d, want %d", got, want)
+ }
+ if got, want := two8r(64), int8(16); want != got {
+ t.Errorf("64>>1>>1 == %d, want %d", got, want)
+ }
+ if got, want := two8ur(64), uint8(16); want != got {
+ t.Errorf("64>>1>>1 == %d, want %d", got, want)
+ }
+}
+
+//go:noinline
+func three64l(x int64) int64 {
+ return x << 3 >> 1 << 2
+}
+
+//go:noinline
+func three64ul(x uint64) uint64 {
+ return x << 3 >> 1 << 2
+}
+
+//go:noinline
+func three64r(x int64) int64 {
+ return x >> 3 << 1 >> 2
+}
+
+//go:noinline
+func three64ur(x uint64) uint64 {
+ return x >> 3 << 1 >> 2
+}
+
+//go:noinline
+func three32l(x int32) int32 {
+ return x << 3 >> 1 << 2
+}
+
+//go:noinline
+func three32ul(x uint32) uint32 {
+ return x << 3 >> 1 << 2
+}
+
+//go:noinline
+func three32r(x int32) int32 {
+ return x >> 3 << 1 >> 2
+}
+
+//go:noinline
+func three32ur(x uint32) uint32 {
+ return x >> 3 << 1 >> 2
+}
+
+//go:noinline
+func three16l(x int16) int16 {
+ return x << 3 >> 1 << 2
+}
+
+//go:noinline
+func three16ul(x uint16) uint16 {
+ return x << 3 >> 1 << 2
+}
+
+//go:noinline
+func three16r(x int16) int16 {
+ return x >> 3 << 1 >> 2
+}
+
+//go:noinline
+func three16ur(x uint16) uint16 {
+ return x >> 3 << 1 >> 2
+}
+
+//go:noinline
+func three8l(x int8) int8 {
+ return x << 3 >> 1 << 2
+}
+
+//go:noinline
+func three8ul(x uint8) uint8 {
+ return x << 3 >> 1 << 2
+}
+
+//go:noinline
+func three8r(x int8) int8 {
+ return x >> 3 << 1 >> 2
+}
+
+//go:noinline
+func three8ur(x uint8) uint8 {
+ return x >> 3 << 1 >> 2
+}
+
+func TestShiftCombine3(t *testing.T) {
+ if got, want := three64l(4), int64(64); want != got {
+ t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+ }
+ if got, want := three64ul(4), uint64(64); want != got {
+ t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+ }
+ if got, want := three64r(64), int64(4); want != got {
+ t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+ }
+ if got, want := three64ur(64), uint64(4); want != got {
+ t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+ }
+ if got, want := three32l(4), int32(64); want != got {
+ t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+ }
+ if got, want := three32ul(4), uint32(64); want != got {
+ t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+ }
+ if got, want := three32r(64), int32(4); want != got {
+ t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+ }
+ if got, want := three32ur(64), uint32(4); want != got {
+ t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+ }
+ if got, want := three16l(4), int16(64); want != got {
+ t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+ }
+ if got, want := three16ul(4), uint16(64); want != got {
+ t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+ }
+ if got, want := three16r(64), int16(4); want != got {
+ t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+ }
+ if got, want := three16ur(64), uint16(4); want != got {
+ t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+ }
+ if got, want := three8l(4), int8(64); want != got {
+ t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+ }
+ if got, want := three8ul(4), uint8(64); want != got {
+ t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+ }
+ if got, want := three8r(64), int8(4); want != got {
+ t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+ }
+ if got, want := three8ur(64), uint8(4); want != got {
+ t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+ }
+}
+
+var (
+ one64 int64 = 1
+ one64u uint64 = 1
+ one32 int32 = 1
+ one32u uint32 = 1
+ one16 int16 = 1
+ one16u uint16 = 1
+ one8 int8 = 1
+ one8u uint8 = 1
+)
+
+func TestShiftLargeCombine(t *testing.T) {
+ var N uint64 = 0x8000000000000000
+ if one64<<N<<N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one64>>N>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one64u>>N>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one32<<N<<N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one32>>N>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one32u>>N>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one16<<N<<N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one16>>N>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one16u>>N>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one8<<N<<N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one8>>N>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one8u>>N>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+}
+
+func TestShiftLargeCombine3(t *testing.T) {
+ var N uint64 = 0x8000000000000001
+ if one64<<N>>2<<N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one64u<<N>>2<<N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one64>>N<<2>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one64u>>N<<2>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one32<<N>>2<<N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one32u<<N>>2<<N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one32>>N<<2>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one32u>>N<<2>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one16<<N>>2<<N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one16u<<N>>2<<N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one16>>N<<2>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one16u>>N<<2>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one8<<N>>2<<N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one8u<<N>>2<<N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one8>>N<<2>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+ if one8u>>N<<2>>N == 1 {
+ t.Errorf("shift overflow mishandled")
+ }
+}
// zero shifted.
(Lsh64x64 (Const64 [0]) _) -> (Const64 [0])
+(Lsh64x32 (Const64 [0]) _) -> (Const64 [0])
+(Lsh64x16 (Const64 [0]) _) -> (Const64 [0])
+(Lsh64x8 (Const64 [0]) _) -> (Const64 [0])
(Rsh64x64 (Const64 [0]) _) -> (Const64 [0])
+(Rsh64x32 (Const64 [0]) _) -> (Const64 [0])
+(Rsh64x16 (Const64 [0]) _) -> (Const64 [0])
+(Rsh64x8 (Const64 [0]) _) -> (Const64 [0])
(Rsh64Ux64 (Const64 [0]) _) -> (Const64 [0])
-(Lsh32x64 (Const64 [0]) _) -> (Const32 [0])
-(Rsh32x64 (Const64 [0]) _) -> (Const32 [0])
-(Rsh32Ux64 (Const64 [0]) _) -> (Const32 [0])
-(Lsh16x64 (Const64 [0]) _) -> (Const16 [0])
-(Rsh16x64 (Const64 [0]) _) -> (Const16 [0])
-(Rsh16Ux64 (Const64 [0]) _) -> (Const16 [0])
-(Lsh8x64 (Const64 [0]) _) -> (Const8 [0])
-(Rsh8x64 (Const64 [0]) _) -> (Const8 [0])
-(Rsh8Ux64 (Const64 [0]) _) -> (Const8 [0])
+(Rsh64Ux32 (Const64 [0]) _) -> (Const64 [0])
+(Rsh64Ux16 (Const64 [0]) _) -> (Const64 [0])
+(Rsh64Ux8 (Const64 [0]) _) -> (Const64 [0])
+(Lsh32x64 (Const32 [0]) _) -> (Const32 [0])
+(Lsh32x32 (Const32 [0]) _) -> (Const32 [0])
+(Lsh32x16 (Const32 [0]) _) -> (Const32 [0])
+(Lsh32x8 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32x64 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32x32 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32x16 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32x8 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32Ux64 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32Ux32 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32Ux16 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32Ux8 (Const32 [0]) _) -> (Const32 [0])
+(Lsh16x64 (Const16 [0]) _) -> (Const16 [0])
+(Lsh16x32 (Const16 [0]) _) -> (Const16 [0])
+(Lsh16x16 (Const16 [0]) _) -> (Const16 [0])
+(Lsh16x8 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16x64 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16x32 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16x16 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16x8 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16Ux64 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16Ux32 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16Ux16 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16Ux8 (Const16 [0]) _) -> (Const16 [0])
+(Lsh8x64 (Const8 [0]) _) -> (Const8 [0])
+(Lsh8x32 (Const8 [0]) _) -> (Const8 [0])
+(Lsh8x16 (Const8 [0]) _) -> (Const8 [0])
+(Lsh8x8 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8x64 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8x32 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8x16 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8x8 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8Ux64 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8Ux32 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8Ux16 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8Ux8 (Const8 [0]) _) -> (Const8 [0])
// large left shifts of all values, and right shifts of unsigned values
(Lsh64x64 _ (Const64 [c])) && uint64(c) >= 64 -> (Const64 [0])
// ((x >> c1) << c2) >> c3
(Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
- && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2)
+ && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
-> (Rsh64Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
-(Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const32 [c1])) (Const32 [c2])) (Const32 [c3]))
- && uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)
- -> (Rsh32Ux64 x (Const32 <config.fe.TypeUInt32()> [int64(int32(c1-c2+c3))]))
-(Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const16 [c1])) (Const16 [c2])) (Const16 [c3]))
- && uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)
- -> (Rsh16Ux64 x (Const16 <config.fe.TypeUInt16()> [int64(int16(c1-c2+c3))]))
-(Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const8 [c1])) (Const8 [c2])) (Const8 [c3]))
- && uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2)
- -> (Rsh8Ux64 x (Const8 <config.fe.TypeUInt8()> [int64(int8(c1-c2+c3))]))
+(Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ -> (Rsh32Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
+(Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ -> (Rsh16Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
+(Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ -> (Rsh8Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
// ((x << c1) >> c2) << c3
(Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
- && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2)
+ && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
-> (Lsh64x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
-(Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const32 [c1])) (Const32 [c2])) (Const32 [c3]))
- && uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)
- -> (Lsh32x64 x (Const32 <config.fe.TypeUInt32()> [int64(int32(c1-c2+c3))]))
-(Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const16 [c1])) (Const16 [c2])) (Const16 [c3]))
- && uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)
- -> (Lsh16x64 x (Const16 <config.fe.TypeUInt16()> [int64(int16(c1-c2+c3))]))
-(Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const8 [c1])) (Const8 [c2])) (Const8 [c3]))
- && uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2)
- -> (Lsh8x64 x (Const8 <config.fe.TypeUInt8()> [int64(int8(c1-c2+c3))]))
+(Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ -> (Lsh32x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
+(Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ -> (Lsh16x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
+(Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ -> (Lsh8x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
// constant comparisons
(Eq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c == d)])
v.AddArg(v0)
return true
}
+ // match: (Lsh16x16 (Const16 [0]) _)
+ // cond:
+ // result: (Const16 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst16)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh16x32(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Lsh16x32 (Const16 [0]) _)
+ // cond:
+ // result: (Const16 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst16)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool {
v.AddArg(x)
return true
}
- // match: (Lsh16x64 (Const64 [0]) _)
+ // match: (Lsh16x64 (Const16 [0]) _)
// cond:
// result: (Const16 [0])
for {
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst16 {
break
}
if v_0.AuxInt != 0 {
v.AddArg(v0)
return true
}
- // match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const16 [c1])) (Const16 [c2])) (Const16 [c3]))
- // cond: uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)
- // result: (Lsh16x64 x (Const16 <config.fe.TypeUInt16()> [int64(int16(c1-c2+c3))]))
+ // match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ // result: (Lsh16x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
for {
v_0 := v.Args[0]
if v_0.Op != OpRsh16Ux64 {
}
x := v_0_0.Args[0]
v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst16 {
+ if v_0_0_1.Op != OpConst64 {
break
}
c1 := v_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst16 {
+ if v_0_1.Op != OpConst64 {
break
}
c2 := v_0_1.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst16 {
+ if v_1.Op != OpConst64 {
break
}
c3 := v_1.AuxInt
- if !(uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)) {
+ if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
break
}
v.reset(OpLsh16x64)
v.AddArg(x)
- v0 := b.NewValue0(v.Line, OpConst16, config.fe.TypeUInt16())
- v0.AuxInt = int64(int16(c1 - c2 + c3))
+ v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
+ v0.AuxInt = c1 - c2 + c3
v.AddArg(v0)
return true
}
v.AddArg(v0)
return true
}
+ // match: (Lsh16x8 (Const16 [0]) _)
+ // cond:
+ // result: (Const16 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst16)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh32x16(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Lsh32x16 (Const32 [0]) _)
+ // cond:
+ // result: (Const32 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh32x32(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Lsh32x32 (Const32 [0]) _)
+ // cond:
+ // result: (Const32 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool {
v.AddArg(x)
return true
}
- // match: (Lsh32x64 (Const64 [0]) _)
+ // match: (Lsh32x64 (Const32 [0]) _)
// cond:
// result: (Const32 [0])
for {
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst32 {
break
}
if v_0.AuxInt != 0 {
v.AddArg(v0)
return true
}
- // match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const32 [c1])) (Const32 [c2])) (Const32 [c3]))
- // cond: uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)
- // result: (Lsh32x64 x (Const32 <config.fe.TypeUInt32()> [int64(int32(c1-c2+c3))]))
+ // match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ // result: (Lsh32x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
for {
v_0 := v.Args[0]
if v_0.Op != OpRsh32Ux64 {
}
x := v_0_0.Args[0]
v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst32 {
+ if v_0_0_1.Op != OpConst64 {
break
}
c1 := v_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ if v_0_1.Op != OpConst64 {
break
}
c2 := v_0_1.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ if v_1.Op != OpConst64 {
break
}
c3 := v_1.AuxInt
- if !(uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)) {
+ if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
break
}
v.reset(OpLsh32x64)
v.AddArg(x)
- v0 := b.NewValue0(v.Line, OpConst32, config.fe.TypeUInt32())
- v0.AuxInt = int64(int32(c1 - c2 + c3))
+ v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
+ v0.AuxInt = c1 - c2 + c3
v.AddArg(v0)
return true
}
v.AddArg(v0)
return true
}
+ // match: (Lsh32x8 (Const32 [0]) _)
+ // cond:
+ // result: (Const32 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh64x16(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Lsh64x16 (Const64 [0]) _)
+ // cond:
+ // result: (Const64 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst64)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh64x32(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Lsh64x32 (Const64 [0]) _)
+ // cond:
+ // result: (Const64 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst64)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool {
return true
}
// match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
- // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2)
+ // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Lsh64x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
for {
v_0 := v.Args[0]
break
}
c3 := v_1.AuxInt
- if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2)) {
+ if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
break
}
v.reset(OpLsh64x64)
v.AddArg(v0)
return true
}
+ // match: (Lsh64x8 (Const64 [0]) _)
+ // cond:
+ // result: (Const64 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst64)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh8x16(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Lsh8x16 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh8x32(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Lsh8x32 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool {
v.AddArg(x)
return true
}
- // match: (Lsh8x64 (Const64 [0]) _)
+ // match: (Lsh8x64 (Const8 [0]) _)
// cond:
// result: (Const8 [0])
for {
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst8 {
break
}
if v_0.AuxInt != 0 {
v.AddArg(v0)
return true
}
- // match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const8 [c1])) (Const8 [c2])) (Const8 [c3]))
- // cond: uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2)
- // result: (Lsh8x64 x (Const8 <config.fe.TypeUInt8()> [int64(int8(c1-c2+c3))]))
+ // match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ // result: (Lsh8x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
for {
v_0 := v.Args[0]
if v_0.Op != OpRsh8Ux64 {
}
x := v_0_0.Args[0]
v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst8 {
+ if v_0_0_1.Op != OpConst64 {
break
}
c1 := v_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst8 {
+ if v_0_1.Op != OpConst64 {
break
}
c2 := v_0_1.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ if v_1.Op != OpConst64 {
break
}
c3 := v_1.AuxInt
- if !(uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2)) {
+ if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
break
}
v.reset(OpLsh8x64)
v.AddArg(x)
- v0 := b.NewValue0(v.Line, OpConst8, config.fe.TypeUInt8())
- v0.AuxInt = int64(int8(c1 - c2 + c3))
+ v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
+ v0.AuxInt = c1 - c2 + c3
v.AddArg(v0)
return true
}
v.AddArg(v0)
return true
}
+ // match: (Lsh8x8 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpMod16(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh16Ux16 (Const16 [0]) _)
+ // cond:
+ // result: (Const16 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst16)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh16Ux32(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh16Ux32 (Const16 [0]) _)
+ // cond:
+ // result: (Const16 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst16)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool {
v.AddArg(x)
return true
}
- // match: (Rsh16Ux64 (Const64 [0]) _)
+ // match: (Rsh16Ux64 (Const16 [0]) _)
// cond:
// result: (Const16 [0])
for {
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst16 {
break
}
if v_0.AuxInt != 0 {
v.AddArg(v0)
return true
}
- // match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const16 [c1])) (Const16 [c2])) (Const16 [c3]))
- // cond: uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)
- // result: (Rsh16Ux64 x (Const16 <config.fe.TypeUInt16()> [int64(int16(c1-c2+c3))]))
+ // match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ // result: (Rsh16Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
for {
v_0 := v.Args[0]
if v_0.Op != OpLsh16x64 {
}
x := v_0_0.Args[0]
v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst16 {
+ if v_0_0_1.Op != OpConst64 {
break
}
c1 := v_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst16 {
+ if v_0_1.Op != OpConst64 {
break
}
c2 := v_0_1.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst16 {
+ if v_1.Op != OpConst64 {
break
}
c3 := v_1.AuxInt
- if !(uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)) {
+ if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
break
}
v.reset(OpRsh16Ux64)
v.AddArg(x)
- v0 := b.NewValue0(v.Line, OpConst16, config.fe.TypeUInt16())
- v0.AuxInt = int64(int16(c1 - c2 + c3))
+ v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
+ v0.AuxInt = c1 - c2 + c3
v.AddArg(v0)
return true
}
v.AddArg(v0)
return true
}
+ // match: (Rsh16Ux8 (Const16 [0]) _)
+ // cond:
+ // result: (Const16 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst16)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh16x16(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh16x16 (Const16 [0]) _)
+ // cond:
+ // result: (Const16 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst16)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh16x32(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh16x32 (Const16 [0]) _)
+ // cond:
+ // result: (Const16 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst16)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh16x64(v *Value, config *Config) bool {
v.AddArg(x)
return true
}
- // match: (Rsh16x64 (Const64 [0]) _)
+ // match: (Rsh16x64 (Const16 [0]) _)
// cond:
// result: (Const16 [0])
for {
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst16 {
break
}
if v_0.AuxInt != 0 {
v.AddArg(v0)
return true
}
+ // match: (Rsh16x8 (Const16 [0]) _)
+ // cond:
+ // result: (Const16 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst16)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh32Ux16(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh32Ux16 (Const32 [0]) _)
+ // cond:
+ // result: (Const32 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh32Ux32(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh32Ux32 (Const32 [0]) _)
+ // cond:
+ // result: (Const32 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool {
v.AddArg(x)
return true
}
- // match: (Rsh32Ux64 (Const64 [0]) _)
+ // match: (Rsh32Ux64 (Const32 [0]) _)
// cond:
// result: (Const32 [0])
for {
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst32 {
break
}
if v_0.AuxInt != 0 {
v.AddArg(v0)
return true
}
- // match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const32 [c1])) (Const32 [c2])) (Const32 [c3]))
- // cond: uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)
- // result: (Rsh32Ux64 x (Const32 <config.fe.TypeUInt32()> [int64(int32(c1-c2+c3))]))
+ // match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ // result: (Rsh32Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
for {
v_0 := v.Args[0]
if v_0.Op != OpLsh32x64 {
}
x := v_0_0.Args[0]
v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst32 {
+ if v_0_0_1.Op != OpConst64 {
break
}
c1 := v_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ if v_0_1.Op != OpConst64 {
break
}
c2 := v_0_1.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ if v_1.Op != OpConst64 {
break
}
c3 := v_1.AuxInt
- if !(uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)) {
+ if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
break
}
v.reset(OpRsh32Ux64)
v.AddArg(x)
- v0 := b.NewValue0(v.Line, OpConst32, config.fe.TypeUInt32())
- v0.AuxInt = int64(int32(c1 - c2 + c3))
+ v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
+ v0.AuxInt = c1 - c2 + c3
v.AddArg(v0)
return true
}
v.AddArg(v0)
return true
}
+ // match: (Rsh32Ux8 (Const32 [0]) _)
+ // cond:
+ // result: (Const32 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh32x16(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh32x16 (Const32 [0]) _)
+ // cond:
+ // result: (Const32 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh32x32(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh32x32 (Const32 [0]) _)
+ // cond:
+ // result: (Const32 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh32x64(v *Value, config *Config) bool {
v.AddArg(x)
return true
}
- // match: (Rsh32x64 (Const64 [0]) _)
+ // match: (Rsh32x64 (Const32 [0]) _)
// cond:
// result: (Const32 [0])
for {
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst32 {
break
}
if v_0.AuxInt != 0 {
v.AddArg(v0)
return true
}
+ // match: (Rsh32x8 (Const32 [0]) _)
+ // cond:
+ // result: (Const32 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh64Ux16(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh64Ux16 (Const64 [0]) _)
+ // cond:
+ // result: (Const64 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst64)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh64Ux32(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh64Ux32 (Const64 [0]) _)
+ // cond:
+ // result: (Const64 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst64)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool {
return true
}
// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
- // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2)
+ // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh64Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
for {
v_0 := v.Args[0]
break
}
c3 := v_1.AuxInt
- if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2)) {
+ if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
break
}
v.reset(OpRsh64Ux64)
v.AddArg(v0)
return true
}
+ // match: (Rsh64Ux8 (Const64 [0]) _)
+ // cond:
+ // result: (Const64 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst64)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh64x16(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh64x16 (Const64 [0]) _)
+ // cond:
+ // result: (Const64 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst64)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh64x32(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh64x32 (Const64 [0]) _)
+ // cond:
+ // result: (Const64 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst64)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh64x64(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh64x8 (Const64 [0]) _)
+ // cond:
+ // result: (Const64 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst64)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh8Ux16(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh8Ux16 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh8Ux32(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh8Ux32 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool {
v.AddArg(x)
return true
}
- // match: (Rsh8Ux64 (Const64 [0]) _)
+ // match: (Rsh8Ux64 (Const8 [0]) _)
// cond:
// result: (Const8 [0])
for {
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst8 {
break
}
if v_0.AuxInt != 0 {
v.AddArg(v0)
return true
}
- // match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const8 [c1])) (Const8 [c2])) (Const8 [c3]))
- // cond: uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2)
- // result: (Rsh8Ux64 x (Const8 <config.fe.TypeUInt8()> [int64(int8(c1-c2+c3))]))
+ // match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+ // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+ // result: (Rsh8Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
for {
v_0 := v.Args[0]
if v_0.Op != OpLsh8x64 {
}
x := v_0_0.Args[0]
v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst8 {
+ if v_0_0_1.Op != OpConst64 {
break
}
c1 := v_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst8 {
+ if v_0_1.Op != OpConst64 {
break
}
c2 := v_0_1.AuxInt
v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ if v_1.Op != OpConst64 {
break
}
c3 := v_1.AuxInt
- if !(uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2)) {
+ if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
break
}
v.reset(OpRsh8Ux64)
v.AddArg(x)
- v0 := b.NewValue0(v.Line, OpConst8, config.fe.TypeUInt8())
- v0.AuxInt = int64(int8(c1 - c2 + c3))
+ v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
+ v0.AuxInt = c1 - c2 + c3
v.AddArg(v0)
return true
}
v.AddArg(v0)
return true
}
+ // match: (Rsh8Ux8 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh8x16(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh8x16 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh8x32(v *Value, config *Config) bool {
v.AddArg(v0)
return true
}
+ // match: (Rsh8x32 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpRsh8x64(v *Value, config *Config) bool {
v.AddArg(x)
return true
}
- // match: (Rsh8x64 (Const64 [0]) _)
+ // match: (Rsh8x64 (Const8 [0]) _)
// cond:
// result: (Const8 [0])
for {
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpConst8 {
break
}
if v_0.AuxInt != 0 {
v.AddArg(v0)
return true
}
+ // match: (Rsh8x8 (Const8 [0]) _)
+ // cond:
+ // result: (Const8 [0])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = 0
+ return true
+ }
return false
}
func rewriteValuegeneric_OpSliceCap(v *Value, config *Config) bool {