From: Keith Randall Date: Sun, 29 May 2016 04:15:24 +0000 (-0700) Subject: cmd/compile: shift tests, fix triple-shift rules X-Git-Tag: go1.7beta1~41 X-Git-Url: http://www.git.cypherpunks.su/?a=commitdiff_plain;h=17396575135ba71472ab8a5b82f31af50d8bf312;p=gostls13.git cmd/compile: shift tests, fix triple-shift rules Add a bunch of tests for shifts. Fix triple-shift rules to always take constant shifts as 64 bits. (Earlier rules always promote shift amounts to 64 bits.) Add overflow checks. Increases generic rule coverage to 91% Change-Id: I6b42d368d19d36ac482dbb8e0d4f67e30ad7145d Reviewed-on: https://go-review.googlesource.com/23555 Reviewed-by: Todd Neal --- diff --git a/src/cmd/compile/internal/gc/shift_test.go b/src/cmd/compile/internal/gc/shift_test.go new file mode 100644 index 0000000000..cb6be777a7 --- /dev/null +++ b/src/cmd/compile/internal/gc/shift_test.go @@ -0,0 +1,907 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gc + +import "testing" + +// Tests shifts of zero. + +//go:noinline +func ofz64l64(n uint64) int64 { + var x int64 + return x << n +} + +//go:noinline +func ofz64l32(n uint32) int64 { + var x int64 + return x << n +} + +//go:noinline +func ofz64l16(n uint16) int64 { + var x int64 + return x << n +} + +//go:noinline +func ofz64l8(n uint8) int64 { + var x int64 + return x << n +} + +//go:noinline +func ofz64r64(n uint64) int64 { + var x int64 + return x >> n +} + +//go:noinline +func ofz64r32(n uint32) int64 { + var x int64 + return x >> n +} + +//go:noinline +func ofz64r16(n uint16) int64 { + var x int64 + return x >> n +} + +//go:noinline +func ofz64r8(n uint8) int64 { + var x int64 + return x >> n +} + +//go:noinline +func ofz64ur64(n uint64) uint64 { + var x uint64 + return x >> n +} + +//go:noinline +func ofz64ur32(n uint32) uint64 { + var x uint64 + return x >> n +} + +//go:noinline +func ofz64ur16(n uint16) uint64 { + var x uint64 + return x >> n +} + +//go:noinline +func ofz64ur8(n uint8) uint64 { + var x uint64 + return x >> n +} + +//go:noinline +func ofz32l64(n uint64) int32 { + var x int32 + return x << n +} + +//go:noinline +func ofz32l32(n uint32) int32 { + var x int32 + return x << n +} + +//go:noinline +func ofz32l16(n uint16) int32 { + var x int32 + return x << n +} + +//go:noinline +func ofz32l8(n uint8) int32 { + var x int32 + return x << n +} + +//go:noinline +func ofz32r64(n uint64) int32 { + var x int32 + return x >> n +} + +//go:noinline +func ofz32r32(n uint32) int32 { + var x int32 + return x >> n +} + +//go:noinline +func ofz32r16(n uint16) int32 { + var x int32 + return x >> n +} + +//go:noinline +func ofz32r8(n uint8) int32 { + var x int32 + return x >> n +} + +//go:noinline +func ofz32ur64(n uint64) uint32 { + var x uint32 + return x >> n +} + +//go:noinline +func ofz32ur32(n uint32) uint32 { + var x uint32 + return x >> n +} + +//go:noinline +func ofz32ur16(n uint16) uint32 { + var x uint32 + return x >> n +} + +//go:noinline +func ofz32ur8(n uint8) uint32 { + var x uint32 + return x >> n +} + +//go:noinline +func ofz16l64(n uint64) int16 { + var x int16 + return x << n +} + +//go:noinline +func ofz16l32(n uint32) int16 { + var x int16 + return x << n +} + +//go:noinline +func ofz16l16(n uint16) int16 { + var x int16 + return x << n +} + +//go:noinline +func ofz16l8(n uint8) int16 { + var x int16 + return x << n +} + +//go:noinline +func ofz16r64(n uint64) int16 { + var x int16 + return x >> n +} + +//go:noinline 
+func ofz16r32(n uint32) int16 { + var x int16 + return x >> n +} + +//go:noinline +func ofz16r16(n uint16) int16 { + var x int16 + return x >> n +} + +//go:noinline +func ofz16r8(n uint8) int16 { + var x int16 + return x >> n +} + +//go:noinline +func ofz16ur64(n uint64) uint16 { + var x uint16 + return x >> n +} + +//go:noinline +func ofz16ur32(n uint32) uint16 { + var x uint16 + return x >> n +} + +//go:noinline +func ofz16ur16(n uint16) uint16 { + var x uint16 + return x >> n +} + +//go:noinline +func ofz16ur8(n uint8) uint16 { + var x uint16 + return x >> n +} + +//go:noinline +func ofz8l64(n uint64) int8 { + var x int8 + return x << n +} + +//go:noinline +func ofz8l32(n uint32) int8 { + var x int8 + return x << n +} + +//go:noinline +func ofz8l16(n uint16) int8 { + var x int8 + return x << n +} + +//go:noinline +func ofz8l8(n uint8) int8 { + var x int8 + return x << n +} + +//go:noinline +func ofz8r64(n uint64) int8 { + var x int8 + return x >> n +} + +//go:noinline +func ofz8r32(n uint32) int8 { + var x int8 + return x >> n +} + +//go:noinline +func ofz8r16(n uint16) int8 { + var x int8 + return x >> n +} + +//go:noinline +func ofz8r8(n uint8) int8 { + var x int8 + return x >> n +} + +//go:noinline +func ofz8ur64(n uint64) uint8 { + var x uint8 + return x >> n +} + +//go:noinline +func ofz8ur32(n uint32) uint8 { + var x uint8 + return x >> n +} + +//go:noinline +func ofz8ur16(n uint16) uint8 { + var x uint8 + return x >> n +} + +//go:noinline +func ofz8ur8(n uint8) uint8 { + var x uint8 + return x >> n +} + +func TestShiftOfZero(t *testing.T) { + if got := ofz64l64(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz64l32(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz64l16(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz64l8(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz64r64(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz64r32(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz64r16(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz64r8(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz64ur64(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + if got := ofz64ur32(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + if got := ofz64ur16(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + if got := ofz64ur8(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + + if got := ofz32l64(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz32l32(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz32l16(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz32l8(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz32r64(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz32r32(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz32r16(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz32r8(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz32ur64(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + if got := ofz32ur32(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + if got := ofz32ur16(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + if got := ofz32ur8(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + + if got := ofz16l64(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := 
ofz16l32(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz16l16(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz16l8(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz16r64(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz16r32(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz16r16(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz16r8(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz16ur64(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + if got := ofz16ur32(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + if got := ofz16ur16(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + if got := ofz16ur8(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + + if got := ofz8l64(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz8l32(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz8l16(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz8l8(5); got != 0 { + t.Errorf("0<<5 == %d, want 0", got) + } + if got := ofz8r64(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz8r32(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz8r16(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz8r8(5); got != 0 { + t.Errorf("0>>5 == %d, want 0", got) + } + if got := ofz8ur64(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + if got := ofz8ur32(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + if got := ofz8ur16(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } + if got := ofz8ur8(5); got != 0 { + t.Errorf("0>>>5 == %d, want 0", got) + } +} + +//go:noinline +func byz64l(n int64) int64 { + return n << 0 +} + +//go:noinline +func byz64r(n int64) int64 { + return n >> 0 +} + +//go:noinline +func byz64ur(n uint64) uint64 { + return n >> 0 +} + +//go:noinline +func byz32l(n int32) int32 { + return n << 0 +} + +//go:noinline +func byz32r(n int32) int32 { + return n >> 0 +} + +//go:noinline +func byz32ur(n uint32) uint32 { + return n >> 0 +} + +//go:noinline +func byz16l(n int16) int16 { + return n << 0 +} + +//go:noinline +func byz16r(n int16) int16 { + return n >> 0 +} + +//go:noinline +func byz16ur(n uint16) uint16 { + return n >> 0 +} + +//go:noinline +func byz8l(n int8) int8 { + return n << 0 +} + +//go:noinline +func byz8r(n int8) int8 { + return n >> 0 +} + +//go:noinline +func byz8ur(n uint8) uint8 { + return n >> 0 +} + +func TestShiftByZero(t *testing.T) { + { + var n int64 = 0x5555555555555555 + if got := byz64l(n); got != n { + t.Errorf("%x<<0 == %x, want %x", n, got, n) + } + if got := byz64r(n); got != n { + t.Errorf("%x>>0 == %x, want %x", n, got, n) + } + } + { + var n uint64 = 0xaaaaaaaaaaaaaaaa + if got := byz64ur(n); got != n { + t.Errorf("%x>>>0 == %x, want %x", n, got, n) + } + } + + { + var n int32 = 0x55555555 + if got := byz32l(n); got != n { + t.Errorf("%x<<0 == %x, want %x", n, got, n) + } + if got := byz32r(n); got != n { + t.Errorf("%x>>0 == %x, want %x", n, got, n) + } + } + { + var n uint32 = 0xaaaaaaaa + if got := byz32ur(n); got != n { + t.Errorf("%x>>>0 == %x, want %x", n, got, n) + } + } + + { + var n int16 = 0x5555 + if got := byz16l(n); got != n { + t.Errorf("%x<<0 == %x, want %x", n, got, n) + } + if got := byz16r(n); got != n { + t.Errorf("%x>>0 == %x, want %x", n, got, n) + } + } + { + var n uint16 = 0xaaaa + if got := byz16ur(n); got != n { + 
t.Errorf("%x>>>0 == %x, want %x", n, got, n) + } + } + + { + var n int8 = 0x55 + if got := byz8l(n); got != n { + t.Errorf("%x<<0 == %x, want %x", n, got, n) + } + if got := byz8r(n); got != n { + t.Errorf("%x>>0 == %x, want %x", n, got, n) + } + } + { + var n uint8 = 0x55 + if got := byz8ur(n); got != n { + t.Errorf("%x>>>0 == %x, want %x", n, got, n) + } + } +} + +//go:noinline +func two64l(x int64) int64 { + return x << 1 << 1 +} + +//go:noinline +func two64r(x int64) int64 { + return x >> 1 >> 1 +} + +//go:noinline +func two64ur(x uint64) uint64 { + return x >> 1 >> 1 +} + +//go:noinline +func two32l(x int32) int32 { + return x << 1 << 1 +} + +//go:noinline +func two32r(x int32) int32 { + return x >> 1 >> 1 +} + +//go:noinline +func two32ur(x uint32) uint32 { + return x >> 1 >> 1 +} + +//go:noinline +func two16l(x int16) int16 { + return x << 1 << 1 +} + +//go:noinline +func two16r(x int16) int16 { + return x >> 1 >> 1 +} + +//go:noinline +func two16ur(x uint16) uint16 { + return x >> 1 >> 1 +} + +//go:noinline +func two8l(x int8) int8 { + return x << 1 << 1 +} + +//go:noinline +func two8r(x int8) int8 { + return x >> 1 >> 1 +} + +//go:noinline +func two8ur(x uint8) uint8 { + return x >> 1 >> 1 +} + +func TestShiftCombine(t *testing.T) { + if got, want := two64l(4), int64(16); want != got { + t.Errorf("4<<1<<1 == %d, want %d", got, want) + } + if got, want := two64r(64), int64(16); want != got { + t.Errorf("64>>1>>1 == %d, want %d", got, want) + } + if got, want := two64ur(64), uint64(16); want != got { + t.Errorf("64>>1>>1 == %d, want %d", got, want) + } + if got, want := two32l(4), int32(16); want != got { + t.Errorf("4<<1<<1 == %d, want %d", got, want) + } + if got, want := two32r(64), int32(16); want != got { + t.Errorf("64>>1>>1 == %d, want %d", got, want) + } + if got, want := two32ur(64), uint32(16); want != got { + t.Errorf("64>>1>>1 == %d, want %d", got, want) + } + if got, want := two16l(4), int16(16); want != got { + t.Errorf("4<<1<<1 == %d, want %d", got, want) + } + if got, want := two16r(64), int16(16); want != got { + t.Errorf("64>>1>>1 == %d, want %d", got, want) + } + if got, want := two16ur(64), uint16(16); want != got { + t.Errorf("64>>1>>1 == %d, want %d", got, want) + } + if got, want := two8l(4), int8(16); want != got { + t.Errorf("4<<1<<1 == %d, want %d", got, want) + } + if got, want := two8r(64), int8(16); want != got { + t.Errorf("64>>1>>1 == %d, want %d", got, want) + } + if got, want := two8ur(64), uint8(16); want != got { + t.Errorf("64>>1>>1 == %d, want %d", got, want) + } + +} + +//go:noinline +func three64l(x int64) int64 { + return x << 3 >> 1 << 2 +} + +//go:noinline +func three64ul(x uint64) uint64 { + return x << 3 >> 1 << 2 +} + +//go:noinline +func three64r(x int64) int64 { + return x >> 3 << 1 >> 2 +} + +//go:noinline +func three64ur(x uint64) uint64 { + return x >> 3 << 1 >> 2 +} + +//go:noinline +func three32l(x int32) int32 { + return x << 3 >> 1 << 2 +} + +//go:noinline +func three32ul(x uint32) uint32 { + return x << 3 >> 1 << 2 +} + +//go:noinline +func three32r(x int32) int32 { + return x >> 3 << 1 >> 2 +} + +//go:noinline +func three32ur(x uint32) uint32 { + return x >> 3 << 1 >> 2 +} + +//go:noinline +func three16l(x int16) int16 { + return x << 3 >> 1 << 2 +} + +//go:noinline +func three16ul(x uint16) uint16 { + return x << 3 >> 1 << 2 +} + +//go:noinline +func three16r(x int16) int16 { + return x >> 3 << 1 >> 2 +} + +//go:noinline +func three16ur(x uint16) uint16 { + return x >> 3 << 1 >> 2 +} + +//go:noinline +func three8l(x int8) int8 
{
+	return x << 3 >> 1 << 2
+}
+
+//go:noinline
+func three8ul(x uint8) uint8 {
+	return x << 3 >> 1 << 2
+}
+
+//go:noinline
+func three8r(x int8) int8 {
+	return x >> 3 << 1 >> 2
+}
+
+//go:noinline
+func three8ur(x uint8) uint8 {
+	return x >> 3 << 1 >> 2
+}
+
+func TestShiftCombine3(t *testing.T) {
+	if got, want := three64l(4), int64(64); want != got {
+		t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+	}
+	if got, want := three64ul(4), uint64(64); want != got {
+		t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+	}
+	if got, want := three64r(64), int64(4); want != got {
+		t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+	}
+	if got, want := three64ur(64), uint64(4); want != got {
+		t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+	}
+	if got, want := three32l(4), int32(64); want != got {
+		t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+	}
+	if got, want := three32ul(4), uint32(64); want != got {
+		t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+	}
+	if got, want := three32r(64), int32(4); want != got {
+		t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+	}
+	if got, want := three32ur(64), uint32(4); want != got {
+		t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+	}
+	if got, want := three16l(4), int16(64); want != got {
+		t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+	}
+	if got, want := three16ul(4), uint16(64); want != got {
+		t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+	}
+	if got, want := three16r(64), int16(4); want != got {
+		t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+	}
+	if got, want := three16ur(64), uint16(4); want != got {
+		t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+	}
+	if got, want := three8l(4), int8(64); want != got {
+		t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+	}
+	if got, want := three8ul(4), uint8(64); want != got {
+		t.Errorf("4<<3>>1<<2 == %d, want %d", got, want)
+	}
+	if got, want := three8r(64), int8(4); want != got {
+		t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+	}
+	if got, want := three8ur(64), uint8(4); want != got {
+		t.Errorf("64>>3<<1>>2 == %d, want %d", got, want)
+	}
+}
+
+var (
+	one64  int64  = 1
+	one64u uint64 = 1
+	one32  int32  = 1
+	one32u uint32 = 1
+	one16  int16  = 1
+	one16u uint16 = 1
+	one8   int8   = 1
+	one8u  uint8  = 1
+)
+
+func TestShiftLargeCombine(t *testing.T) {
+	var N uint64 = 0x8000000000000000
+	if one64<<N<<N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one64>>N>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one64u>>N>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one32<<N<<N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one32>>N>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one32u>>N>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one16<<N<<N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one16>>N>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one16u>>N>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one8<<N<<N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one8>>N>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one8u>>N>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+}
+
+func TestShiftLargeCombine3(t *testing.T) {
+	var N uint64 = 0x8000000000000001
+	if one64<<N>>2<<N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one64u<<N>>2<<N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one64>>N<<2>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one64u>>N<<2>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one32<<N>>2<<N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one32u<<N>>2<<N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one32>>N<<2>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one32u>>N<<2>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one16<<N>>2<<N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one16u<<N>>2<<N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one16>>N<<2>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one16u>>N<<2>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one8<<N>>2<<N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one8u<<N>>2<<N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one8>>N<<2>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+	if one8u>>N<<2>>N == 1 {
+		t.Errorf("shift overflow mishandled")
+	}
+}
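The two TestShiftLargeCombine tests above pin down the failure mode that the rule changes below guard against: when a rewrite folds constant shift amounts together, the folded amount can wrap around 64-bit arithmetic. A standalone illustration (the constants mirror TestShiftLargeCombine3; the program itself is not part of the commit):

package main

import "fmt"

func main() {
	var x uint64 = 1
	c1 := uint64(0x8000000000000001)
	c2 := uint64(2)
	c3 := uint64(0x8000000000000001)

	// Evaluated one shift at a time, every shift amount is >= 64,
	// so Go's shift semantics zero the value out.
	stepwise := x >> c1 << c2 >> c3

	// Folding the amounts into c1-c2+c3 wraps around uint64 arithmetic:
	// 0x8000000000000001 - 2 + 0x8000000000000001 == 0, turning the
	// expression into x >> 0, which is x.
	folded := x >> (c1 - c2 + c3)

	fmt.Println(stepwise, folded) // 0 1: folding without an overflow check changes the result
}

This is why each triple-shift rule in the diff below may only fire when !uaddOvf(c1-c2, c3) holds, i.e. when the combined shift amount does not overflow.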
diff --git a/src/cmd/compile/internal/ssa/gen/generic.rules b/src/cmd/compile/internal/ssa/gen/generic.rules
index 7460f183d7..f5d1c98a83 100644
--- a/src/cmd/compile/internal/ssa/gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/gen/generic.rules
@@ -246,17 +246,53 @@
 // zero shifted.
 (Lsh64x64 (Const64 [0]) _) -> (Const64 [0])
+(Lsh64x32 (Const64 [0]) _) -> (Const64 [0])
+(Lsh64x16 (Const64 [0]) _) -> (Const64 [0])
+(Lsh64x8 (Const64 [0]) _) -> (Const64 [0])
 (Rsh64x64 (Const64 [0]) _) -> (Const64 [0])
+(Rsh64x32 (Const64 [0]) _) -> (Const64 [0])
+(Rsh64x16 (Const64 [0]) _) -> (Const64 [0])
+(Rsh64x8 (Const64 [0]) _) -> (Const64 [0])
 (Rsh64Ux64 (Const64 [0]) _) -> (Const64 [0])
-(Lsh32x64 (Const64 [0]) _) -> (Const32 [0])
-(Rsh32x64 (Const64 [0]) _) -> (Const32 [0])
-(Rsh32Ux64 (Const64 [0]) _) -> (Const32 [0])
-(Lsh16x64 (Const64 [0]) _) -> (Const16 [0])
-(Rsh16x64 (Const64 [0]) _) -> (Const16 [0])
-(Rsh16Ux64 (Const64 [0]) _) -> (Const16 [0])
-(Lsh8x64 (Const64 [0]) _) -> (Const8 [0])
-(Rsh8x64 (Const64 [0]) _) -> (Const8 [0])
-(Rsh8Ux64 (Const64 [0]) _) -> (Const8 [0])
+(Rsh64Ux32 (Const64 [0]) _) -> (Const64 [0])
+(Rsh64Ux16 (Const64 [0]) _) -> (Const64 [0])
+(Rsh64Ux8 (Const64 [0]) _) -> (Const64 [0])
+(Lsh32x64 (Const32 [0]) _) -> (Const32 [0])
+(Lsh32x32 (Const32 [0]) _) -> (Const32 [0])
+(Lsh32x16 (Const32 [0]) _) -> (Const32 [0])
+(Lsh32x8 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32x64 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32x32 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32x16 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32x8 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32Ux64 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32Ux32 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32Ux16 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32Ux8 (Const32 [0]) _) -> (Const32 [0])
+(Lsh16x64 (Const16 [0]) _) -> (Const16 [0])
+(Lsh16x32 (Const16 [0]) _) -> (Const16 [0])
+(Lsh16x16 (Const16 [0]) _) -> (Const16 [0])
+(Lsh16x8 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16x64 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16x32 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16x16 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16x8 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16Ux64 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16Ux32 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16Ux16 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16Ux8 (Const16 [0]) _) -> (Const16 [0])
+(Lsh8x64 (Const8 [0]) _) -> (Const8 [0])
+(Lsh8x32 (Const8 [0]) _) -> (Const8 [0])
+(Lsh8x16 (Const8 [0]) _) -> (Const8 [0])
+(Lsh8x8 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8x64 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8x32 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8x16 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8x8 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8Ux64 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8Ux32 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8Ux16 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8Ux8 (Const8 [0]) _) -> (Const8 [0])
 
 // large left shifts of all values, and right shifts of unsigned values
 (Lsh64x64 _ (Const64 [c])) && uint64(c) >= 64 -> (Const64 [0])
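The hunk above widens the shift-of-zero rules so the matched constant has the same width as the value being shifted; a (Const64 [0]) can never appear as the first operand of a 32-, 16- or 8-bit shift, so the old forms were dead rules that never fired (hence the coverage increase the commit message reports). The hunk below then rewrites the triple-shift rules to match every constant shift amount as a Const64, since the earlier rules in this file already promote shift amounts to 64 bits, and adds the !uaddOvf(c1-c2, c3) overflow check. uaddOvf is the ssa package's unsigned-add overflow predicate (defined in rewrite.go); a minimal sketch consistent with how the rules use it, not a verbatim copy:

// uaddOvf reports whether the unsigned 64-bit sum a+b wraps around,
// in which case a+b is not a valid combined shift amount.
func uaddOvf(a, b int64) bool {
	return uint64(a)+uint64(b) < uint64(a)
}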
@@ -286,31 +322,31 @@
 // ((x >> c1) << c2) >> c3
 (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
-	&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2)
+	&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 	-> (Rsh64Ux64 x (Const64 [c1-c2+c3]))
-(Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const32 [c1])) (Const32 [c2])) (Const32 [c3]))
-	&& uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)
-	-> (Rsh32Ux64 x (Const32 [int64(int32(c1-c2+c3))]))
-(Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const16 [c1])) (Const16 [c2])) (Const16 [c3]))
-	&& uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)
-	-> (Rsh16Ux64 x (Const16 [int64(int16(c1-c2+c3))]))
-(Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const8 [c1])) (Const8 [c2])) (Const8 [c3]))
-	&& uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2)
-	-> (Rsh8Ux64 x (Const8 [int64(int8(c1-c2+c3))]))
+(Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+	&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+	-> (Rsh32Ux64 x (Const64 [c1-c2+c3]))
+(Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+	&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+	-> (Rsh16Ux64 x (Const64 [c1-c2+c3]))
+(Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+	&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+	-> (Rsh8Ux64 x (Const64 [c1-c2+c3]))
 
 // ((x << c1) >> c2) << c3
 (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
-	&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2)
+	&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 	-> (Lsh64x64 x (Const64 [c1-c2+c3]))
-(Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const32 [c1])) (Const32 [c2])) (Const32 [c3]))
-	&& uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)
-	-> (Lsh32x64 x (Const32 [int64(int32(c1-c2+c3))]))
-(Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const16 [c1])) (Const16 [c2])) (Const16 [c3]))
-	&& uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)
-	-> (Lsh16x64 x (Const16 [int64(int16(c1-c2+c3))]))
-(Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const8 [c1])) (Const8 [c2])) (Const8 [c3]))
-	&& uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2)
-	-> (Lsh8x64 x (Const8 [int64(int8(c1-c2+c3))]))
+(Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+	&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+	-> (Lsh32x64 x (Const64 [c1-c2+c3]))
+(Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+	&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+	-> (Lsh16x64 x (Const64 [c1-c2+c3]))
+(Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
+	&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
+	-> (Lsh8x64 x (Const64 [c1-c2+c3]))
 
 // constant comparisons
 (Eq64 (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(c == d)])
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index c702919a57..9f08b3c4eb 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -3823,6 +3823,21 @@ func rewriteValuegeneric_OpLsh16x16(v *Value, config *Config) bool {
 		v.AddArg(v0)
 		return true
 	}
+	// match: (Lsh16x16 (Const16 [0]) _)
+	// cond:
+	// result: (Const16 [0])
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst16 {
+			break
+		}
+		if v_0.AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst16)
+		v.AuxInt = 0
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpLsh16x32(v *Value, config *Config) bool {
@@ -3846,6 +3861,21 @@ func rewriteValuegeneric_OpLsh16x32(v *Value, config *Config) bool {
 		v.AddArg(v0)
 		return true
 	}
+	// match: (Lsh16x32 (Const16 [0]) _)
+	// cond:
+	// result: (Const16 [0])
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst16 {
+			break
+		}
+		if v_0.AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst16)
+		v.AuxInt = 0
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool {
@@ 
-3886,12 +3916,12 @@ func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool { v.AddArg(x) return true } - // match: (Lsh16x64 (Const64 [0]) _) + // match: (Lsh16x64 (Const16 [0]) _) // cond: // result: (Const16 [0]) for { v_0 := v.Args[0] - if v_0.Op != OpConst64 { + if v_0.Op != OpConst16 { break } if v_0.AuxInt != 0 { @@ -3947,9 +3977,9 @@ func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool { v.AddArg(v0) return true } - // match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const16 [c1])) (Const16 [c2])) (Const16 [c3])) - // cond: uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2) - // result: (Lsh16x64 x (Const16 [int64(int16(c1-c2+c3))])) + // match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) + // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) + // result: (Lsh16x64 x (Const64 [c1-c2+c3])) for { v_0 := v.Args[0] if v_0.Op != OpRsh16Ux64 { @@ -3961,27 +3991,27 @@ func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool { } x := v_0_0.Args[0] v_0_0_1 := v_0_0.Args[1] - if v_0_0_1.Op != OpConst16 { + if v_0_0_1.Op != OpConst64 { break } c1 := v_0_0_1.AuxInt v_0_1 := v_0.Args[1] - if v_0_1.Op != OpConst16 { + if v_0_1.Op != OpConst64 { break } c2 := v_0_1.AuxInt v_1 := v.Args[1] - if v_1.Op != OpConst16 { + if v_1.Op != OpConst64 { break } c3 := v_1.AuxInt - if !(uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)) { + if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpLsh16x64) v.AddArg(x) - v0 := b.NewValue0(v.Line, OpConst16, config.fe.TypeUInt16()) - v0.AuxInt = int64(int16(c1 - c2 + c3)) + v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) + v0.AuxInt = c1 - c2 + c3 v.AddArg(v0) return true } @@ -4008,6 +4038,21 @@ func rewriteValuegeneric_OpLsh16x8(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Lsh16x8 (Const16 [0]) _) + // cond: + // result: (Const16 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst16 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpLsh32x16(v *Value, config *Config) bool { @@ -4031,6 +4076,21 @@ func rewriteValuegeneric_OpLsh32x16(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Lsh32x16 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst32 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpLsh32x32(v *Value, config *Config) bool { @@ -4054,6 +4114,21 @@ func rewriteValuegeneric_OpLsh32x32(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Lsh32x32 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst32 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool { @@ -4094,12 +4169,12 @@ func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool { v.AddArg(x) return true } - // match: (Lsh32x64 (Const64 [0]) _) + // match: (Lsh32x64 (Const32 [0]) _) // cond: // result: (Const32 [0]) for { v_0 := v.Args[0] - if v_0.Op != OpConst64 { + if v_0.Op != OpConst32 { break } if v_0.AuxInt != 0 { @@ -4155,9 +4230,9 @@ func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool { v.AddArg(v0) return true } - // 
match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const32 [c1])) (Const32 [c2])) (Const32 [c3])) - // cond: uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2) - // result: (Lsh32x64 x (Const32 [int64(int32(c1-c2+c3))])) + // match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) + // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) + // result: (Lsh32x64 x (Const64 [c1-c2+c3])) for { v_0 := v.Args[0] if v_0.Op != OpRsh32Ux64 { @@ -4169,27 +4244,27 @@ func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool { } x := v_0_0.Args[0] v_0_0_1 := v_0_0.Args[1] - if v_0_0_1.Op != OpConst32 { + if v_0_0_1.Op != OpConst64 { break } c1 := v_0_0_1.AuxInt v_0_1 := v_0.Args[1] - if v_0_1.Op != OpConst32 { + if v_0_1.Op != OpConst64 { break } c2 := v_0_1.AuxInt v_1 := v.Args[1] - if v_1.Op != OpConst32 { + if v_1.Op != OpConst64 { break } c3 := v_1.AuxInt - if !(uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)) { + if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpLsh32x64) v.AddArg(x) - v0 := b.NewValue0(v.Line, OpConst32, config.fe.TypeUInt32()) - v0.AuxInt = int64(int32(c1 - c2 + c3)) + v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) + v0.AuxInt = c1 - c2 + c3 v.AddArg(v0) return true } @@ -4216,6 +4291,21 @@ func rewriteValuegeneric_OpLsh32x8(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Lsh32x8 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst32 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpLsh64x16(v *Value, config *Config) bool { @@ -4239,6 +4329,21 @@ func rewriteValuegeneric_OpLsh64x16(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Lsh64x16 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst64 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpLsh64x32(v *Value, config *Config) bool { @@ -4262,6 +4367,21 @@ func rewriteValuegeneric_OpLsh64x32(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Lsh64x32 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst64 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool { @@ -4364,7 +4484,7 @@ func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool { return true } // match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) - // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) + // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) // result: (Lsh64x64 x (Const64 [c1-c2+c3])) for { v_0 := v.Args[0] @@ -4391,7 +4511,7 @@ func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool { break } c3 := v_1.AuxInt - if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2)) { + if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpLsh64x64) @@ -4424,6 +4544,21 @@ func rewriteValuegeneric_OpLsh64x8(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Lsh64x8 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + v_0 := 
v.Args[0] + if v_0.Op != OpConst64 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpLsh8x16(v *Value, config *Config) bool { @@ -4447,6 +4582,21 @@ func rewriteValuegeneric_OpLsh8x16(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Lsh8x16 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst8 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpLsh8x32(v *Value, config *Config) bool { @@ -4470,6 +4620,21 @@ func rewriteValuegeneric_OpLsh8x32(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Lsh8x32 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst8 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool { @@ -4510,12 +4675,12 @@ func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool { v.AddArg(x) return true } - // match: (Lsh8x64 (Const64 [0]) _) + // match: (Lsh8x64 (Const8 [0]) _) // cond: // result: (Const8 [0]) for { v_0 := v.Args[0] - if v_0.Op != OpConst64 { + if v_0.Op != OpConst8 { break } if v_0.AuxInt != 0 { @@ -4571,9 +4736,9 @@ func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool { v.AddArg(v0) return true } - // match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const8 [c1])) (Const8 [c2])) (Const8 [c3])) - // cond: uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2) - // result: (Lsh8x64 x (Const8 [int64(int8(c1-c2+c3))])) + // match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) + // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) + // result: (Lsh8x64 x (Const64 [c1-c2+c3])) for { v_0 := v.Args[0] if v_0.Op != OpRsh8Ux64 { @@ -4585,27 +4750,27 @@ func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool { } x := v_0_0.Args[0] v_0_0_1 := v_0_0.Args[1] - if v_0_0_1.Op != OpConst8 { + if v_0_0_1.Op != OpConst64 { break } c1 := v_0_0_1.AuxInt v_0_1 := v_0.Args[1] - if v_0_1.Op != OpConst8 { + if v_0_1.Op != OpConst64 { break } c2 := v_0_1.AuxInt v_1 := v.Args[1] - if v_1.Op != OpConst8 { + if v_1.Op != OpConst64 { break } c3 := v_1.AuxInt - if !(uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2)) { + if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpLsh8x64) v.AddArg(x) - v0 := b.NewValue0(v.Line, OpConst8, config.fe.TypeUInt8()) - v0.AuxInt = int64(int8(c1 - c2 + c3)) + v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) + v0.AuxInt = c1 - c2 + c3 v.AddArg(v0) return true } @@ -4632,6 +4797,21 @@ func rewriteValuegeneric_OpLsh8x8(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Lsh8x8 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst8 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpMod16(v *Value, config *Config) bool { @@ -6600,6 +6780,21 @@ func rewriteValuegeneric_OpRsh16Ux16(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh16Ux16 (Const16 [0]) _) + // cond: + // result: (Const16 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst16 { + break + } + if v_0.AuxInt != 0 { + break + } 
+ v.reset(OpConst16) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh16Ux32(v *Value, config *Config) bool { @@ -6623,6 +6818,21 @@ func rewriteValuegeneric_OpRsh16Ux32(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh16Ux32 (Const16 [0]) _) + // cond: + // result: (Const16 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst16 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool { @@ -6663,12 +6873,12 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool { v.AddArg(x) return true } - // match: (Rsh16Ux64 (Const64 [0]) _) + // match: (Rsh16Ux64 (Const16 [0]) _) // cond: // result: (Const16 [0]) for { v_0 := v.Args[0] - if v_0.Op != OpConst64 { + if v_0.Op != OpConst16 { break } if v_0.AuxInt != 0 { @@ -6724,9 +6934,9 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool { v.AddArg(v0) return true } - // match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const16 [c1])) (Const16 [c2])) (Const16 [c3])) - // cond: uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2) - // result: (Rsh16Ux64 x (Const16 [int64(int16(c1-c2+c3))])) + // match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) + // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) + // result: (Rsh16Ux64 x (Const64 [c1-c2+c3])) for { v_0 := v.Args[0] if v_0.Op != OpLsh16x64 { @@ -6738,27 +6948,27 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool { } x := v_0_0.Args[0] v_0_0_1 := v_0_0.Args[1] - if v_0_0_1.Op != OpConst16 { + if v_0_0_1.Op != OpConst64 { break } c1 := v_0_0_1.AuxInt v_0_1 := v_0.Args[1] - if v_0_1.Op != OpConst16 { + if v_0_1.Op != OpConst64 { break } c2 := v_0_1.AuxInt v_1 := v.Args[1] - if v_1.Op != OpConst16 { + if v_1.Op != OpConst64 { break } c3 := v_1.AuxInt - if !(uint16(c1) >= uint16(c2) && uint16(c3) >= uint16(c2)) { + if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpRsh16Ux64) v.AddArg(x) - v0 := b.NewValue0(v.Line, OpConst16, config.fe.TypeUInt16()) - v0.AuxInt = int64(int16(c1 - c2 + c3)) + v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) + v0.AuxInt = c1 - c2 + c3 v.AddArg(v0) return true } @@ -6785,6 +6995,21 @@ func rewriteValuegeneric_OpRsh16Ux8(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh16Ux8 (Const16 [0]) _) + // cond: + // result: (Const16 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst16 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh16x16(v *Value, config *Config) bool { @@ -6808,6 +7033,21 @@ func rewriteValuegeneric_OpRsh16x16(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh16x16 (Const16 [0]) _) + // cond: + // result: (Const16 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst16 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh16x32(v *Value, config *Config) bool { @@ -6831,6 +7071,21 @@ func rewriteValuegeneric_OpRsh16x32(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh16x32 (Const16 [0]) _) + // cond: + // result: (Const16 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst16 { + break + } + if v_0.AuxInt != 0 { + break + } + 
v.reset(OpConst16) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh16x64(v *Value, config *Config) bool { @@ -6871,12 +7126,12 @@ func rewriteValuegeneric_OpRsh16x64(v *Value, config *Config) bool { v.AddArg(x) return true } - // match: (Rsh16x64 (Const64 [0]) _) + // match: (Rsh16x64 (Const16 [0]) _) // cond: // result: (Const16 [0]) for { v_0 := v.Args[0] - if v_0.Op != OpConst64 { + if v_0.Op != OpConst16 { break } if v_0.AuxInt != 0 { @@ -6939,6 +7194,21 @@ func rewriteValuegeneric_OpRsh16x8(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh16x8 (Const16 [0]) _) + // cond: + // result: (Const16 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst16 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst16) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh32Ux16(v *Value, config *Config) bool { @@ -6962,6 +7232,21 @@ func rewriteValuegeneric_OpRsh32Ux16(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh32Ux16 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst32 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh32Ux32(v *Value, config *Config) bool { @@ -6985,6 +7270,21 @@ func rewriteValuegeneric_OpRsh32Ux32(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh32Ux32 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst32 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool { @@ -7025,12 +7325,12 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool { v.AddArg(x) return true } - // match: (Rsh32Ux64 (Const64 [0]) _) + // match: (Rsh32Ux64 (Const32 [0]) _) // cond: // result: (Const32 [0]) for { v_0 := v.Args[0] - if v_0.Op != OpConst64 { + if v_0.Op != OpConst32 { break } if v_0.AuxInt != 0 { @@ -7086,9 +7386,9 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool { v.AddArg(v0) return true } - // match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const32 [c1])) (Const32 [c2])) (Const32 [c3])) - // cond: uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2) - // result: (Rsh32Ux64 x (Const32 [int64(int32(c1-c2+c3))])) + // match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) + // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) + // result: (Rsh32Ux64 x (Const64 [c1-c2+c3])) for { v_0 := v.Args[0] if v_0.Op != OpLsh32x64 { @@ -7100,27 +7400,27 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool { } x := v_0_0.Args[0] v_0_0_1 := v_0_0.Args[1] - if v_0_0_1.Op != OpConst32 { + if v_0_0_1.Op != OpConst64 { break } c1 := v_0_0_1.AuxInt v_0_1 := v_0.Args[1] - if v_0_1.Op != OpConst32 { + if v_0_1.Op != OpConst64 { break } c2 := v_0_1.AuxInt v_1 := v.Args[1] - if v_1.Op != OpConst32 { + if v_1.Op != OpConst64 { break } c3 := v_1.AuxInt - if !(uint32(c1) >= uint32(c2) && uint32(c3) >= uint32(c2)) { + if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpRsh32Ux64) v.AddArg(x) - v0 := b.NewValue0(v.Line, OpConst32, config.fe.TypeUInt32()) - v0.AuxInt = int64(int32(c1 - c2 + c3)) + v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) + v0.AuxInt = c1 - c2 + 
c3 v.AddArg(v0) return true } @@ -7147,6 +7447,21 @@ func rewriteValuegeneric_OpRsh32Ux8(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh32Ux8 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst32 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh32x16(v *Value, config *Config) bool { @@ -7170,6 +7485,21 @@ func rewriteValuegeneric_OpRsh32x16(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh32x16 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst32 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh32x32(v *Value, config *Config) bool { @@ -7193,6 +7523,21 @@ func rewriteValuegeneric_OpRsh32x32(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh32x32 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst32 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh32x64(v *Value, config *Config) bool { @@ -7233,12 +7578,12 @@ func rewriteValuegeneric_OpRsh32x64(v *Value, config *Config) bool { v.AddArg(x) return true } - // match: (Rsh32x64 (Const64 [0]) _) + // match: (Rsh32x64 (Const32 [0]) _) // cond: // result: (Const32 [0]) for { v_0 := v.Args[0] - if v_0.Op != OpConst64 { + if v_0.Op != OpConst32 { break } if v_0.AuxInt != 0 { @@ -7301,6 +7646,21 @@ func rewriteValuegeneric_OpRsh32x8(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh32x8 (Const32 [0]) _) + // cond: + // result: (Const32 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst32 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst32) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh64Ux16(v *Value, config *Config) bool { @@ -7324,6 +7684,21 @@ func rewriteValuegeneric_OpRsh64Ux16(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh64Ux16 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst64 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh64Ux32(v *Value, config *Config) bool { @@ -7347,6 +7722,21 @@ func rewriteValuegeneric_OpRsh64Ux32(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh64Ux32 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst64 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool { @@ -7449,7 +7839,7 @@ func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool { return true } // match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) - // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) + // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) // result: (Rsh64Ux64 x (Const64 [c1-c2+c3])) for { v_0 := v.Args[0] @@ -7476,7 +7866,7 @@ func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool { break } c3 := v_1.AuxInt - if !(uint64(c1) >= uint64(c2) && 
uint64(c3) >= uint64(c2)) { + if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpRsh64Ux64) @@ -7509,6 +7899,21 @@ func rewriteValuegeneric_OpRsh64Ux8(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh64Ux8 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst64 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh64x16(v *Value, config *Config) bool { @@ -7532,6 +7937,21 @@ func rewriteValuegeneric_OpRsh64x16(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh64x16 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst64 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh64x32(v *Value, config *Config) bool { @@ -7555,6 +7975,21 @@ func rewriteValuegeneric_OpRsh64x32(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh64x32 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst64 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh64x64(v *Value, config *Config) bool { @@ -7663,6 +8098,21 @@ func rewriteValuegeneric_OpRsh64x8(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh64x8 (Const64 [0]) _) + // cond: + // result: (Const64 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst64 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst64) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh8Ux16(v *Value, config *Config) bool { @@ -7686,6 +8136,21 @@ func rewriteValuegeneric_OpRsh8Ux16(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh8Ux16 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst8 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh8Ux32(v *Value, config *Config) bool { @@ -7709,6 +8174,21 @@ func rewriteValuegeneric_OpRsh8Ux32(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh8Ux32 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst8 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool { @@ -7749,12 +8229,12 @@ func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool { v.AddArg(x) return true } - // match: (Rsh8Ux64 (Const64 [0]) _) + // match: (Rsh8Ux64 (Const8 [0]) _) // cond: // result: (Const8 [0]) for { v_0 := v.Args[0] - if v_0.Op != OpConst64 { + if v_0.Op != OpConst8 { break } if v_0.AuxInt != 0 { @@ -7810,9 +8290,9 @@ func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool { v.AddArg(v0) return true } - // match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const8 [c1])) (Const8 [c2])) (Const8 [c3])) - // cond: uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2) - // result: (Rsh8Ux64 x (Const8 [int64(int8(c1-c2+c3))])) + // match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) + // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= 
uint64(c2) && !uaddOvf(c1-c2, c3) + // result: (Rsh8Ux64 x (Const64 [c1-c2+c3])) for { v_0 := v.Args[0] if v_0.Op != OpLsh8x64 { @@ -7824,27 +8304,27 @@ func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool { } x := v_0_0.Args[0] v_0_0_1 := v_0_0.Args[1] - if v_0_0_1.Op != OpConst8 { + if v_0_0_1.Op != OpConst64 { break } c1 := v_0_0_1.AuxInt v_0_1 := v_0.Args[1] - if v_0_1.Op != OpConst8 { + if v_0_1.Op != OpConst64 { break } c2 := v_0_1.AuxInt v_1 := v.Args[1] - if v_1.Op != OpConst8 { + if v_1.Op != OpConst64 { break } c3 := v_1.AuxInt - if !(uint8(c1) >= uint8(c2) && uint8(c3) >= uint8(c2)) { + if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpRsh8Ux64) v.AddArg(x) - v0 := b.NewValue0(v.Line, OpConst8, config.fe.TypeUInt8()) - v0.AuxInt = int64(int8(c1 - c2 + c3)) + v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) + v0.AuxInt = c1 - c2 + c3 v.AddArg(v0) return true } @@ -7871,6 +8351,21 @@ func rewriteValuegeneric_OpRsh8Ux8(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh8Ux8 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst8 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh8x16(v *Value, config *Config) bool { @@ -7894,6 +8389,21 @@ func rewriteValuegeneric_OpRsh8x16(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh8x16 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst8 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh8x32(v *Value, config *Config) bool { @@ -7917,6 +8427,21 @@ func rewriteValuegeneric_OpRsh8x32(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh8x32 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst8 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpRsh8x64(v *Value, config *Config) bool { @@ -7957,12 +8482,12 @@ func rewriteValuegeneric_OpRsh8x64(v *Value, config *Config) bool { v.AddArg(x) return true } - // match: (Rsh8x64 (Const64 [0]) _) + // match: (Rsh8x64 (Const8 [0]) _) // cond: // result: (Const8 [0]) for { v_0 := v.Args[0] - if v_0.Op != OpConst64 { + if v_0.Op != OpConst8 { break } if v_0.AuxInt != 0 { @@ -8025,6 +8550,21 @@ func rewriteValuegeneric_OpRsh8x8(v *Value, config *Config) bool { v.AddArg(v0) return true } + // match: (Rsh8x8 (Const8 [0]) _) + // cond: + // result: (Const8 [0]) + for { + v_0 := v.Args[0] + if v_0.Op != OpConst8 { + break + } + if v_0.AuxInt != 0 { + break + } + v.reset(OpConst8) + v.AuxInt = 0 + return true + } return false } func rewriteValuegeneric_OpSliceCap(v *Value, config *Config) bool {