(LGDR (LDGR x)) => x
// Don't extend before storing
-(MOVWstore [off] {sym} ptr (MOVWreg x) mem) -> (MOVWstore [off] {sym} ptr x mem)
-(MOVHstore [off] {sym} ptr (MOVHreg x) mem) -> (MOVHstore [off] {sym} ptr x mem)
-(MOVBstore [off] {sym} ptr (MOVBreg x) mem) -> (MOVBstore [off] {sym} ptr x mem)
-(MOVWstore [off] {sym} ptr (MOVWZreg x) mem) -> (MOVWstore [off] {sym} ptr x mem)
-(MOVHstore [off] {sym} ptr (MOVHZreg x) mem) -> (MOVHstore [off] {sym} ptr x mem)
-(MOVBstore [off] {sym} ptr (MOVBZreg x) mem) -> (MOVBstore [off] {sym} ptr x mem)
+(MOVWstore [off] {sym} ptr (MOVWreg x) mem) => (MOVWstore [off] {sym} ptr x mem)
+(MOVHstore [off] {sym} ptr (MOVHreg x) mem) => (MOVHstore [off] {sym} ptr x mem)
+(MOVBstore [off] {sym} ptr (MOVBreg x) mem) => (MOVBstore [off] {sym} ptr x mem)
+(MOVWstore [off] {sym} ptr (MOVWZreg x) mem) => (MOVWstore [off] {sym} ptr x mem)
+(MOVHstore [off] {sym} ptr (MOVHZreg x) mem) => (MOVHstore [off] {sym} ptr x mem)
+(MOVBstore [off] {sym} ptr (MOVBZreg x) mem) => (MOVBstore [off] {sym} ptr x mem)
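// The three store widths above only write the low 8/16/32 bits of the source
// register, so a sign or zero extension feeding the store cannot change the
// bytes that reach memory. A minimal Go sketch of the byte case (illustrative
// only, not part of the rules file):
//
//	func storeByte(p *byte, x uint32) {
//		*p = byte(int8(x)) // extend-then-truncate ...
//		*p = byte(x)       // ... stores exactly the same byte as plain truncation
//	}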
// Fold constants into memory operations.
// Note that this is not always a good idea because if not all the uses of
// the ADDconst get eliminated, the ADDconst will still have to be computed anyway.
(FMOVDstore [off1] {sym1} (MOVDaddridx [off2] {sym2} ptr idx) val mem) && is32Bit(off1+off2) && canMergeSym(sym1, sym2) ->
(FMOVDstoreidx [off1+off2] {mergeSym(sym1,sym2)} ptr idx val mem)
-(MOVBZload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB -> (MOVBZloadidx [off] {sym} ptr idx mem)
-(MOVBload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB -> (MOVBloadidx [off] {sym} ptr idx mem)
-(MOVHZload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB -> (MOVHZloadidx [off] {sym} ptr idx mem)
-(MOVHload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB -> (MOVHloadidx [off] {sym} ptr idx mem)
-(MOVWZload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB -> (MOVWZloadidx [off] {sym} ptr idx mem)
-(MOVWload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB -> (MOVWloadidx [off] {sym} ptr idx mem)
-(MOVDload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB -> (MOVDloadidx [off] {sym} ptr idx mem)
-(FMOVSload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB -> (FMOVSloadidx [off] {sym} ptr idx mem)
-(FMOVDload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB -> (FMOVDloadidx [off] {sym} ptr idx mem)
-
-(MOVBstore [off] {sym} (ADD ptr idx) val mem) && ptr.Op != OpSB -> (MOVBstoreidx [off] {sym} ptr idx val mem)
-(MOVHstore [off] {sym} (ADD ptr idx) val mem) && ptr.Op != OpSB -> (MOVHstoreidx [off] {sym} ptr idx val mem)
-(MOVWstore [off] {sym} (ADD ptr idx) val mem) && ptr.Op != OpSB -> (MOVWstoreidx [off] {sym} ptr idx val mem)
-(MOVDstore [off] {sym} (ADD ptr idx) val mem) && ptr.Op != OpSB -> (MOVDstoreidx [off] {sym} ptr idx val mem)
-(FMOVSstore [off] {sym} (ADD ptr idx) val mem) && ptr.Op != OpSB -> (FMOVSstoreidx [off] {sym} ptr idx val mem)
-(FMOVDstore [off] {sym} (ADD ptr idx) val mem) && ptr.Op != OpSB -> (FMOVDstoreidx [off] {sym} ptr idx val mem)
+(MOVBZload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB => (MOVBZloadidx [off] {sym} ptr idx mem)
+(MOVBload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB => (MOVBloadidx [off] {sym} ptr idx mem)
+(MOVHZload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB => (MOVHZloadidx [off] {sym} ptr idx mem)
+(MOVHload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB => (MOVHloadidx [off] {sym} ptr idx mem)
+(MOVWZload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB => (MOVWZloadidx [off] {sym} ptr idx mem)
+(MOVWload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB => (MOVWloadidx [off] {sym} ptr idx mem)
+(MOVDload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB => (MOVDloadidx [off] {sym} ptr idx mem)
+(FMOVSload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB => (FMOVSloadidx [off] {sym} ptr idx mem)
+(FMOVDload [off] {sym} (ADD ptr idx) mem) && ptr.Op != OpSB => (FMOVDloadidx [off] {sym} ptr idx mem)
+
+(MOVBstore [off] {sym} (ADD ptr idx) val mem) && ptr.Op != OpSB => (MOVBstoreidx [off] {sym} ptr idx val mem)
+(MOVHstore [off] {sym} (ADD ptr idx) val mem) && ptr.Op != OpSB => (MOVHstoreidx [off] {sym} ptr idx val mem)
+(MOVWstore [off] {sym} (ADD ptr idx) val mem) && ptr.Op != OpSB => (MOVWstoreidx [off] {sym} ptr idx val mem)
+(MOVDstore [off] {sym} (ADD ptr idx) val mem) && ptr.Op != OpSB => (MOVDstoreidx [off] {sym} ptr idx val mem)
+(FMOVSstore [off] {sym} (ADD ptr idx) val mem) && ptr.Op != OpSB => (FMOVSstoreidx [off] {sym} ptr idx val mem)
+(FMOVDstore [off] {sym} (ADD ptr idx) val mem) && ptr.Op != OpSB => (FMOVDstoreidx [off] {sym} ptr idx val mem)
// combine ADD into indexed loads and stores
(MOVBZloadidx [c] {sym} (ADDconst [d] ptr) idx mem) && is20Bit(c+d) -> (MOVBZloadidx [c+d] {sym} ptr idx mem)
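// is20Bit guards the z/Architecture long-displacement field: the folded
// offset c+d must still fit in the signed 20-bit displacement of an indexed
// memory operand. A sketch of the check, assuming it mirrors the helper of
// the same name in rewrite.go:
//
//	func is20Bit(n int64) bool {
//		return -(1 << 19) <= n && n < (1 << 19)
//	}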
(CMPconst (MOVDconst [x]) [y]) && x==y -> (FlagEQ)
(CMPconst (MOVDconst [x]) [y]) && x<y -> (FlagLT)
(CMPconst (MOVDconst [x]) [y]) && x>y -> (FlagGT)
-(CMPUconst (MOVDconst [x]) [y]) && uint64(x)==uint64(y) -> (FlagEQ)
-(CMPUconst (MOVDconst [x]) [y]) && uint64(x)<uint64(y) -> (FlagLT)
-(CMPUconst (MOVDconst [x]) [y]) && uint64(x)>uint64(y) -> (FlagGT)
+(CMPUconst (MOVDconst [x]) [y]) && uint64(x)==uint64(y) => (FlagEQ)
+(CMPUconst (MOVDconst [x]) [y]) && uint64(x)<uint64(y) => (FlagLT)
+(CMPUconst (MOVDconst [x]) [y]) && uint64(x)>uint64(y) => (FlagGT)
-(CMPWconst (MOVDconst [x]) [y]) && int32(x)==int32(y) -> (FlagEQ)
-(CMPWconst (MOVDconst [x]) [y]) && int32(x)<int32(y) -> (FlagLT)
-(CMPWconst (MOVDconst [x]) [y]) && int32(x)>int32(y) -> (FlagGT)
-(CMPWUconst (MOVDconst [x]) [y]) && uint32(x)==uint32(y) -> (FlagEQ)
-(CMPWUconst (MOVDconst [x]) [y]) && uint32(x)<uint32(y) -> (FlagLT)
-(CMPWUconst (MOVDconst [x]) [y]) && uint32(x)>uint32(y) -> (FlagGT)
+(CMPWconst (MOVDconst [x]) [y]) && int32(x)==int32(y) => (FlagEQ)
+(CMPWconst (MOVDconst [x]) [y]) && int32(x)<int32(y) => (FlagLT)
+(CMPWconst (MOVDconst [x]) [y]) && int32(x)>int32(y) => (FlagGT)
+(CMPWUconst (MOVDconst [x]) [y]) && uint32(x)==uint32(y) => (FlagEQ)
+(CMPWUconst (MOVDconst [x]) [y]) && uint32(x)<uint32(y) => (FlagLT)
+(CMPWUconst (MOVDconst [x]) [y]) && uint32(x)>uint32(y) => (FlagGT)
-(CMP(W|WU)const (MOVBZreg _) [c]) && 0xff < c -> (FlagLT)
-(CMP(W|WU)const (MOVHZreg _) [c]) && 0xffff < c -> (FlagLT)
+(CMP(W|WU)const (MOVBZreg _) [c]) && 0xff < c => (FlagLT)
+(CMP(W|WU)const (MOVHZreg _) [c]) && 0xffff < c => (FlagLT)
-(CMPconst (SRDconst _ [c]) [n]) && c > 0 && n < 0 -> (FlagGT)
-(CMPWconst (SRWconst _ [c]) [n]) && c > 0 && n < 0 -> (FlagGT)
+(CMPconst (SRDconst _ [c]) [n]) && c > 0 && n < 0 => (FlagGT)
+(CMPWconst (SRWconst _ [c]) [n]) && c > 0 && n < 0 => (FlagGT)
-(CMPUconst (SRDconst _ [c]) [n]) && c > 0 && c < 64 && (1<<uint(64-c)) <= uint64(n) -> (FlagLT)
-(CMPWUconst (SRWconst _ [c]) [n]) && c > 0 && c < 32 && (1<<uint(32-c)) <= uint32(n) -> (FlagLT)
+(CMPUconst (SRDconst _ [c]) [n]) && c > 0 && c < 64 && (1<<uint(64-c)) <= uint64(n) => (FlagLT)
+(CMPWUconst (SRWconst _ [c]) [n]) && c > 0 && c < 32 && (1<<uint(32-c)) <= uint32(n) => (FlagLT)
-(CMPWconst (ANDWconst _ [m]) [n]) && int32(m) >= 0 && int32(m) < int32(n) -> (FlagLT)
-(CMPWUconst (ANDWconst _ [m]) [n]) && uint32(m) < uint32(n) -> (FlagLT)
+(CMPWconst (ANDWconst _ [m]) [n]) && int32(m) >= 0 && int32(m) < int32(n) => (FlagLT)
+(CMPWUconst (ANDWconst _ [m]) [n]) && uint32(m) < uint32(n) => (FlagLT)
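// For the two ANDWconst rules just above: x&m is bounded above by m as an
// unsigned value, so whenever m < n the unsigned comparison with n must come
// out "less than". A small Go check of that bound (illustrative; the function
// always returns true):
//
//	func maskedBelow(x, m, n uint32) bool {
//		return m >= n || (x&m) < n
//	}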
// Constant compare-and-branch with immediate.
(CGIJ {c} (MOVDconst [x]) [y] yes no) && c&s390x.Equal != 0 && int64(x) == int64(y) => (First yes no)
// Convert 64-bit comparisons to 32-bit comparisons and signed comparisons
// to unsigned comparisons.
// Helps simplify constant comparison detection.
-(CM(P|PU)const (MOV(W|WZ)reg x) [c]) -> (CMP(W|WU)const x [c])
-(CM(P|P|PU|PU)const x:(MOV(H|HZ|H|HZ)reg _) [c]) -> (CMP(W|W|WU|WU)const x [c])
-(CM(P|P|PU|PU)const x:(MOV(B|BZ|B|BZ)reg _) [c]) -> (CMP(W|W|WU|WU)const x [c])
-(CMPconst (MOV(WZ|W)reg x:(ANDWconst [m] _)) [c]) && int32(m) >= 0 && c >= 0 -> (CMPWUconst x [c])
-(CMPUconst (MOV(WZ|W)reg x:(ANDWconst [m] _)) [c]) && int32(m) >= 0 -> (CMPWUconst x [c])
-(CMPconst x:(SRDconst _ [c]) [n]) && c > 0 && n >= 0 -> (CMPUconst x [n])
-(CMPWconst x:(SRWconst _ [c]) [n]) && c > 0 && n >= 0 -> (CMPWUconst x [n])
+(CM(P|PU)const (MOV(W|WZ)reg x) [c]) => (CMP(W|WU)const x [c])
+(CM(P|P|PU|PU)const x:(MOV(H|HZ|H|HZ)reg _) [c]) => (CMP(W|W|WU|WU)const x [c])
+(CM(P|P|PU|PU)const x:(MOV(B|BZ|B|BZ)reg _) [c]) => (CMP(W|W|WU|WU)const x [c])
+(CMPconst (MOV(WZ|W)reg x:(ANDWconst [m] _)) [c]) && int32(m) >= 0 && c >= 0 => (CMPWUconst x [c])
+(CMPUconst (MOV(WZ|W)reg x:(ANDWconst [m] _)) [c]) && int32(m) >= 0 => (CMPWUconst x [c])
+(CMPconst x:(SRDconst _ [c]) [n]) && c > 0 && n >= 0 => (CMPUconst x [n])
+(CMPWconst x:(SRWconst _ [c]) [n]) && c > 0 && n >= 0 => (CMPWUconst x [n])
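// The SRDconst/SRWconst conversions above rely on a right shift by a positive
// amount clearing the sign bit, so signed and unsigned comparisons against a
// non-negative constant agree. Roughly, in Go:
//
//	func shiftCompareAgree(x uint64, c uint, n int64) bool {
//		v := x >> c // sign bit clear for any c > 0
//		return (int64(v) < n) == (v < uint64(n)) // assuming c > 0 and n >= 0
//	}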
// Absorb sign and zero extensions into 32-bit comparisons.
-(CMP(W|W|WU|WU) x (MOV(W|WZ|W|WZ)reg y)) -> (CMP(W|W|WU|WU) x y)
-(CMP(W|W|WU|WU) (MOV(W|WZ|W|WZ)reg x) y) -> (CMP(W|W|WU|WU) x y)
-(CMP(W|W|WU|WU)const (MOV(W|WZ|W|WZ)reg x) [c]) -> (CMP(W|W|WU|WU)const x [c])
+(CMP(W|W|WU|WU) x (MOV(W|WZ|W|WZ)reg y)) => (CMP(W|W|WU|WU) x y)
+(CMP(W|W|WU|WU) (MOV(W|WZ|W|WZ)reg x) y) => (CMP(W|W|WU|WU) x y)
+(CMP(W|W|WU|WU)const (MOV(W|WZ|W|WZ)reg x) [c]) => (CMP(W|W|WU|WU)const x [c])
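// A 32-bit comparison only inspects the low 32 bits of each operand, and
// MOV(W|WZ)reg leaves those bits untouched, which is why the extensions can
// simply be dropped. Illustrative Go (always returns true):
//
//	func low32Preserved(x int64) bool {
//		signExt := int64(int32(x))  // MOVWreg
//		zeroExt := int64(uint32(x)) // MOVWZreg
//		return int32(signExt) == int32(x) && int32(zeroExt) == int32(x)
//	}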
// Absorb flag constants into branches.
(BRC {c} (FlagEQ) yes no) && c&s390x.Equal != 0 => (First yes no)
(LOCGR {c} x _ (FlagOV)) && c&s390x.Unordered == 0 => x
// Remove redundant *const ops
-(ADDconst [0] x) -> x
-(ADDWconst [c] x) && int32(c)==0 -> x
-(SUBconst [0] x) -> x
-(SUBWconst [c] x) && int32(c) == 0 -> x
-(ANDconst [0] _) -> (MOVDconst [0])
-(ANDWconst [c] _) && int32(c)==0 -> (MOVDconst [0])
-(ANDconst [-1] x) -> x
-(ANDWconst [c] x) && int32(c)==-1 -> x
-(ORconst [0] x) -> x
-(ORWconst [c] x) && int32(c)==0 -> x
-(ORconst [-1] _) -> (MOVDconst [-1])
-(ORWconst [c] _) && int32(c)==-1 -> (MOVDconst [-1])
-(XORconst [0] x) -> x
-(XORWconst [c] x) && int32(c)==0 -> x
+(ADDconst [0] x) => x
+(ADDWconst [c] x) && int32(c)==0 => x
+(SUBconst [0] x) => x
+(SUBWconst [c] x) && int32(c) == 0 => x
+(ANDconst [0] _) => (MOVDconst [0])
+(ANDWconst [c] _) && int32(c)==0 => (MOVDconst [0])
+(ANDconst [-1] x) => x
+(ANDWconst [c] x) && int32(c)==-1 => x
+(ORconst [0] x) => x
+(ORWconst [c] x) && int32(c)==0 => x
+(ORconst [-1] _) => (MOVDconst [-1])
+(ORWconst [c] _) && int32(c)==-1 => (MOVDconst [-1])
+(XORconst [0] x) => x
+(XORWconst [c] x) && int32(c)==0 => x
// Convert constant subtracts to constant adds.
-(SUBconst [c] x) && c != -(1<<31) -> (ADDconst [-c] x)
+(SUBconst [c] x) && c != -(1<<31) => (ADDconst [-c] x)
(SUBWconst [c] x) -> (ADDWconst [int64(int32(-c))] x)
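// The guard c != -(1<<31) matters because negating the most negative 32-bit
// value wraps back to itself, so that one SUBconst cannot be expressed as an
// ADDconst with a negated immediate. For example:
//
//	func negateMinInt32() bool {
//		c := int32(-1 << 31)
//		return -c == c // true: the negation overflows back to -2147483648
//	}
//
// SUBWconst needs no guard: 32-bit arithmetic wraps, so adding int32(-c) is
// equivalent to subtracting c even in the overflowing case.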
// generic constant folding
(ADDWconst [c] (ADDWconst [d] x)) -> (ADDWconst [int64(int32(c+d))] x)
(SUBconst (MOVDconst [d]) [c]) -> (MOVDconst [d-c])
(SUBconst (SUBconst x [d]) [c]) && is32Bit(-c-d) -> (ADDconst [-c-d] x)
-(SRADconst [c] (MOVDconst [d])) -> (MOVDconst [d>>uint64(c)])
-(SRAWconst [c] (MOVDconst [d])) -> (MOVDconst [int64(int32(d))>>uint64(c)])
-(NEG (MOVDconst [c])) -> (MOVDconst [-c])
-(NEGW (MOVDconst [c])) -> (MOVDconst [int64(int32(-c))])
+(SRADconst [c] (MOVDconst [d])) => (MOVDconst [d>>uint64(c)])
+(SRAWconst [c] (MOVDconst [d])) => (MOVDconst [int64(int32(d))>>uint64(c)])
+(NEG (MOVDconst [c])) => (MOVDconst [-c])
+(NEGW (MOVDconst [c])) => (MOVDconst [int64(int32(-c))])
(MULLDconst [c] (MOVDconst [d])) -> (MOVDconst [c*d])
(MULLWconst [c] (MOVDconst [d])) -> (MOVDconst [int64(int32(c*d))])
-(AND (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c&d])
-(ANDconst [c] (MOVDconst [d])) -> (MOVDconst [c&d])
+(AND (MOVDconst [c]) (MOVDconst [d])) => (MOVDconst [c&d])
+(ANDconst [c] (MOVDconst [d])) => (MOVDconst [c&d])
(ANDWconst [c] (MOVDconst [d])) -> (MOVDconst [c&d])
-(OR (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c|d])
-(ORconst [c] (MOVDconst [d])) -> (MOVDconst [c|d])
+(OR (MOVDconst [c]) (MOVDconst [d])) => (MOVDconst [c|d])
+(ORconst [c] (MOVDconst [d])) => (MOVDconst [c|d])
(ORWconst [c] (MOVDconst [d])) -> (MOVDconst [c|d])
-(XOR (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c^d])
-(XORconst [c] (MOVDconst [d])) -> (MOVDconst [c^d])
+(XOR (MOVDconst [c]) (MOVDconst [d])) => (MOVDconst [c^d])
+(XORconst [c] (MOVDconst [d])) => (MOVDconst [c^d])
(XORWconst [c] (MOVDconst [d])) -> (MOVDconst [c^d])
-(LoweredRound32F x:(FMOVSconst)) -> x
-(LoweredRound64F x:(FMOVDconst)) -> x
+(LoweredRound32F x:(FMOVSconst)) => x
+(LoweredRound64F x:(FMOVDconst)) => x
// generic simplifications
// TODO: more of this
-(ADD x (NEG y)) -> (SUB x y)
-(ADDW x (NEGW y)) -> (SUBW x y)
-(SUB x x) -> (MOVDconst [0])
-(SUBW x x) -> (MOVDconst [0])
-(AND x x) -> x
-(ANDW x x) -> x
-(OR x x) -> x
-(ORW x x) -> x
-(XOR x x) -> (MOVDconst [0])
-(XORW x x) -> (MOVDconst [0])
-(NEG (ADDconst [c] (NEG x))) && c != -(1<<31) -> (ADDconst [-c] x)
+(ADD x (NEG y)) => (SUB x y)
+(ADDW x (NEGW y)) => (SUBW x y)
+(SUB x x) => (MOVDconst [0])
+(SUBW x x) => (MOVDconst [0])
+(AND x x) => x
+(ANDW x x) => x
+(OR x x) => x
+(ORW x x) => x
+(XOR x x) => (MOVDconst [0])
+(XORW x x) => (MOVDconst [0])
+(NEG (ADDconst [c] (NEG x))) && c != -(1<<31) => (ADDconst [-c] x)
(MOVBZreg (ANDWconst [m] x)) -> (MOVWZreg (ANDWconst <typ.UInt32> [int64( uint8(m))] x))
(MOVHZreg (ANDWconst [m] x)) -> (MOVWZreg (ANDWconst <typ.UInt32> [int64(uint16(m))] x))
(MOVBreg (ANDWconst [m] x)) && int8(m) >= 0 -> (MOVWZreg (ANDWconst <typ.UInt32> [int64( uint8(m))] x))
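// The MOVBreg rule needs int8(m) >= 0: when bit 7 of the mask is clear, bit 7
// of x&m is clear as well, so sign-extending the masked byte gives the same
// result as zero-extending it and the MOVWZreg form can be used. Roughly:
//
//	func signEqualsZeroExtend(x, m uint32) bool {
//		v := uint8(x) & uint8(m) // low byte after masking
//		return int8(m) < 0 || // only claimed for int8(m) >= 0
//			int32(int8(v)) == int32(v)
//	}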
// (only constant fold borrow of zero)
(Select1 (SUBC (MOVDconst [c]) (MOVDconst [d])))
&& uint64(d) <= uint64(c) && c-d == 0
- -> (FlagGT)
+ => (FlagGT)
(Select1 (SUBC (MOVDconst [c]) (MOVDconst [d])))
&& uint64(d) <= uint64(c) && c-d != 0
- -> (FlagOV)
+ => (FlagOV)
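// Select1 of SUBC is its flag output, which carries the borrow. With constant
// operands and uint64(d) <= uint64(c) there is no borrow, and the two rules
// only distinguish a zero from a non-zero difference (hence the two different
// flag constants). The no-borrow condition itself is easy to state in Go
// using math/bits:
//
//	func noBorrow(c, d uint64) bool {
//		_, borrow := bits.Sub64(c, d, 0)
//		return borrow == 0 // holds exactly when uint64(d) <= uint64(c)
//	}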
// add with carry
-(ADDE x y (FlagEQ)) -> (ADDC x y)
-(ADDE x y (FlagLT)) -> (ADDC x y)
+(ADDE x y (FlagEQ)) => (ADDC x y)
+(ADDE x y (FlagLT)) => (ADDC x y)
(ADDC x (MOVDconst [c])) && is16Bit(c) -> (ADDCconst x [c])
(Select0 (ADDCconst (MOVDconst [c]) [d])) -> (MOVDconst [c+d])
// subtract with borrow
-(SUBE x y (FlagGT)) -> (SUBC x y)
-(SUBE x y (FlagOV)) -> (SUBC x y)
-(Select0 (SUBC (MOVDconst [c]) (MOVDconst [d]))) -> (MOVDconst [c-d])
+(SUBE x y (FlagGT)) => (SUBC x y)
+(SUBE x y (FlagOV)) => (SUBC x y)
+(Select0 (SUBC (MOVDconst [c]) (MOVDconst [d]))) => (MOVDconst [c-d])
// collapse carry chain
(ADDE x y (Select1 (ADDCconst [-1] (Select0 (ADDE (MOVDconst [0]) (MOVDconst [0]) c)))))
- -> (ADDE x y c)
+ => (ADDE x y c)
// collapse borrow chain
(SUBE x y (Select1 (SUBC (MOVDconst [0]) (NEG (Select0 (SUBE (MOVDconst [0]) (MOVDconst [0]) c))))))
- -> (SUBE x y c)
+ => (SUBE x y c)
// branch on carry
(C(G|LG)IJ {s390x.Equal} (Select0 (ADDE (MOVDconst [0]) (MOVDconst [0]) carry)) [0]) => (BRC {s390x.NoCarry} carry)
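// The generated-code half of this change (below) follows mechanically from
// switching rules from "->" to the typed "=>" form: raw v.AuxInt and v.Aux
// accesses become calls to typed conversion helpers, so rulegen can check
// each [auxint] and {aux} against the type declared for the op. A sketch of
// the integer helpers, assuming they match the ones in rewrite.go:
//
//	func auxIntToInt32(i int64) int32 { return int32(i) }
//	func int32ToAuxInt(i int32) int64 { return int64(i) }
//	func auxIntToInt64(i int64) int64 { return i }
//	func int64ToAuxInt(i int64) int64 { return i }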
break
}
v_2_0 := v_2.Args[0]
- if v_2_0.Op != OpS390XADDCconst || v_2_0.AuxInt != -1 {
+ if v_2_0.Op != OpS390XADDCconst || auxIntToInt16(v_2_0.AuxInt) != -1 {
break
}
v_2_0_0 := v_2_0.Args[0]
}
c := v_2_0_0_0.Args[2]
v_2_0_0_0_0 := v_2_0_0_0.Args[0]
- if v_2_0_0_0_0.Op != OpS390XMOVDconst || v_2_0_0_0_0.AuxInt != 0 {
+ if v_2_0_0_0_0.Op != OpS390XMOVDconst || auxIntToInt64(v_2_0_0_0_0.AuxInt) != 0 {
break
}
v_2_0_0_0_1 := v_2_0_0_0.Args[1]
- if v_2_0_0_0_1.Op != OpS390XMOVDconst || v_2_0_0_0_1.AuxInt != 0 {
+ if v_2_0_0_0_1.Op != OpS390XMOVDconst || auxIntToInt64(v_2_0_0_0_1.AuxInt) != 0 {
break
}
v.reset(OpS390XADDE)
// cond: int32(c)==0
// result: x
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(int32(c) == 0) {
break
// match: (ADDconst [0] x)
// result: x
for {
- if v.AuxInt != 0 {
+ if auxIntToInt32(v.AuxInt) != 0 {
break
}
x := v_0
if v_0.Op != OpS390XMOVDconst {
continue
}
- c := v_0.AuxInt
+ c := auxIntToInt64(v_0.AuxInt)
if v_1.Op != OpS390XMOVDconst {
continue
}
- d := v_1.AuxInt
+ d := auxIntToInt64(v_1.AuxInt)
v.reset(OpS390XMOVDconst)
- v.AuxInt = c & d
+ v.AuxInt = int64ToAuxInt(c & d)
return true
}
break
// cond: int32(c)==0
// result: (MOVDconst [0])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if !(int32(c) == 0) {
break
}
v.reset(OpS390XMOVDconst)
- v.AuxInt = 0
+ v.AuxInt = int64ToAuxInt(0)
return true
}
// match: (ANDWconst [c] x)
// cond: int32(c)==-1
// result: x
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(int32(c) == -1) {
break
// match: (ANDconst [0] _)
// result: (MOVDconst [0])
for {
- if v.AuxInt != 0 {
+ if auxIntToInt64(v.AuxInt) != 0 {
break
}
v.reset(OpS390XMOVDconst)
- v.AuxInt = 0
+ v.AuxInt = int64ToAuxInt(0)
return true
}
// match: (ANDconst [-1] x)
// result: x
for {
- if v.AuxInt != -1 {
+ if auxIntToInt64(v.AuxInt) != -1 {
break
}
x := v_0
// match: (ANDconst [c] (MOVDconst [d]))
// result: (MOVDconst [c&d])
for {
- c := v.AuxInt
+ c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt64(v_0.AuxInt)
v.reset(OpS390XMOVDconst)
- v.AuxInt = c & d
+ v.AuxInt = int64ToAuxInt(c & d)
return true
}
return false
// cond: uint64(x)==uint64(y)
// result: (FlagEQ)
for {
- y := v.AuxInt
+ y := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- x := v_0.AuxInt
+ x := auxIntToInt64(v_0.AuxInt)
if !(uint64(x) == uint64(y)) {
break
}
// cond: uint64(x)<uint64(y)
// result: (FlagLT)
for {
- y := v.AuxInt
+ y := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- x := v_0.AuxInt
+ x := auxIntToInt64(v_0.AuxInt)
if !(uint64(x) < uint64(y)) {
break
}
// cond: uint64(x)>uint64(y)
// result: (FlagGT)
for {
- y := v.AuxInt
+ y := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- x := v_0.AuxInt
+ x := auxIntToInt64(v_0.AuxInt)
if !(uint64(x) > uint64(y)) {
break
}
// cond: c > 0 && c < 64 && (1<<uint(64-c)) <= uint64(n)
// result: (FlagLT)
for {
- n := v.AuxInt
+ n := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XSRDconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt8(v_0.AuxInt)
if !(c > 0 && c < 64 && (1<<uint(64-c)) <= uint64(n)) {
break
}
// match: (CMPUconst (MOVWZreg x) [c])
// result: (CMPWUconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVWZreg {
break
}
x := v_0.Args[0]
v.reset(OpS390XCMPWUconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// match: (CMPUconst x:(MOVHreg _) [c])
// result: (CMPWUconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if x.Op != OpS390XMOVHreg {
break
}
v.reset(OpS390XCMPWUconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// match: (CMPUconst x:(MOVHZreg _) [c])
// result: (CMPWUconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if x.Op != OpS390XMOVHZreg {
break
}
v.reset(OpS390XCMPWUconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// match: (CMPUconst x:(MOVBreg _) [c])
// result: (CMPWUconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if x.Op != OpS390XMOVBreg {
break
}
v.reset(OpS390XCMPWUconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// match: (CMPUconst x:(MOVBZreg _) [c])
// result: (CMPWUconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if x.Op != OpS390XMOVBZreg {
break
}
v.reset(OpS390XCMPWUconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// cond: int32(m) >= 0
// result: (CMPWUconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVWZreg {
break
}
if x.Op != OpS390XANDWconst {
break
}
- m := x.AuxInt
+ m := auxIntToInt32(x.AuxInt)
if !(int32(m) >= 0) {
break
}
v.reset(OpS390XCMPWUconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// cond: int32(m) >= 0
// result: (CMPWUconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVWreg {
break
}
if x.Op != OpS390XANDWconst {
break
}
- m := x.AuxInt
+ m := auxIntToInt32(x.AuxInt)
if !(int32(m) >= 0) {
break
}
v.reset(OpS390XCMPWUconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// cond: uint32(x)==uint32(y)
// result: (FlagEQ)
for {
- y := v.AuxInt
+ y := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- x := v_0.AuxInt
+ x := auxIntToInt64(v_0.AuxInt)
if !(uint32(x) == uint32(y)) {
break
}
// cond: uint32(x)<uint32(y)
// result: (FlagLT)
for {
- y := v.AuxInt
+ y := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- x := v_0.AuxInt
+ x := auxIntToInt64(v_0.AuxInt)
if !(uint32(x) < uint32(y)) {
break
}
// cond: uint32(x)>uint32(y)
// result: (FlagGT)
for {
- y := v.AuxInt
+ y := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- x := v_0.AuxInt
+ x := auxIntToInt64(v_0.AuxInt)
if !(uint32(x) > uint32(y)) {
break
}
// cond: 0xff < c
// result: (FlagLT)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVBZreg || !(0xff < c) {
break
}
// cond: 0xffff < c
// result: (FlagLT)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVHZreg || !(0xffff < c) {
break
}
// cond: c > 0 && c < 32 && (1<<uint(32-c)) <= uint32(n)
// result: (FlagLT)
for {
- n := v.AuxInt
+ n := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XSRWconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt8(v_0.AuxInt)
if !(c > 0 && c < 32 && (1<<uint(32-c)) <= uint32(n)) {
break
}
// cond: uint32(m) < uint32(n)
// result: (FlagLT)
for {
- n := v.AuxInt
+ n := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XANDWconst {
break
}
- m := v_0.AuxInt
+ m := auxIntToInt32(v_0.AuxInt)
if !(uint32(m) < uint32(n)) {
break
}
// match: (CMPWUconst (MOVWreg x) [c])
// result: (CMPWUconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVWreg {
break
}
x := v_0.Args[0]
v.reset(OpS390XCMPWUconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// match: (CMPWUconst (MOVWZreg x) [c])
// result: (CMPWUconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVWZreg {
break
}
x := v_0.Args[0]
v.reset(OpS390XCMPWUconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// cond: int32(x)==int32(y)
// result: (FlagEQ)
for {
- y := v.AuxInt
+ y := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- x := v_0.AuxInt
+ x := auxIntToInt64(v_0.AuxInt)
if !(int32(x) == int32(y)) {
break
}
// cond: int32(x)<int32(y)
// result: (FlagLT)
for {
- y := v.AuxInt
+ y := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- x := v_0.AuxInt
+ x := auxIntToInt64(v_0.AuxInt)
if !(int32(x) < int32(y)) {
break
}
// cond: int32(x)>int32(y)
// result: (FlagGT)
for {
- y := v.AuxInt
+ y := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- x := v_0.AuxInt
+ x := auxIntToInt64(v_0.AuxInt)
if !(int32(x) > int32(y)) {
break
}
// cond: 0xff < c
// result: (FlagLT)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVBZreg || !(0xff < c) {
break
}
// cond: 0xffff < c
// result: (FlagLT)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVHZreg || !(0xffff < c) {
break
}
// cond: c > 0 && n < 0
// result: (FlagGT)
for {
- n := v.AuxInt
+ n := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XSRWconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt8(v_0.AuxInt)
if !(c > 0 && n < 0) {
break
}
// cond: int32(m) >= 0 && int32(m) < int32(n)
// result: (FlagLT)
for {
- n := v.AuxInt
+ n := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XANDWconst {
break
}
- m := v_0.AuxInt
+ m := auxIntToInt32(v_0.AuxInt)
if !(int32(m) >= 0 && int32(m) < int32(n)) {
break
}
// cond: c > 0 && n >= 0
// result: (CMPWUconst x [n])
for {
- n := v.AuxInt
+ n := auxIntToInt32(v.AuxInt)
x := v_0
if x.Op != OpS390XSRWconst {
break
}
- c := x.AuxInt
+ c := auxIntToInt8(x.AuxInt)
if !(c > 0 && n >= 0) {
break
}
v.reset(OpS390XCMPWUconst)
- v.AuxInt = n
+ v.AuxInt = int32ToAuxInt(n)
v.AddArg(x)
return true
}
// match: (CMPWconst (MOVWreg x) [c])
// result: (CMPWconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVWreg {
break
}
x := v_0.Args[0]
v.reset(OpS390XCMPWconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// match: (CMPWconst (MOVWZreg x) [c])
// result: (CMPWconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVWZreg {
break
}
x := v_0.Args[0]
v.reset(OpS390XCMPWconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// cond: c > 0 && n < 0
// result: (FlagGT)
for {
- n := v.AuxInt
+ n := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XSRDconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt8(v_0.AuxInt)
if !(c > 0 && n < 0) {
break
}
// match: (CMPconst (MOVWreg x) [c])
// result: (CMPWconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVWreg {
break
}
x := v_0.Args[0]
v.reset(OpS390XCMPWconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// match: (CMPconst x:(MOVHreg _) [c])
// result: (CMPWconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if x.Op != OpS390XMOVHreg {
break
}
v.reset(OpS390XCMPWconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// match: (CMPconst x:(MOVHZreg _) [c])
// result: (CMPWconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if x.Op != OpS390XMOVHZreg {
break
}
v.reset(OpS390XCMPWconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// match: (CMPconst x:(MOVBreg _) [c])
// result: (CMPWconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if x.Op != OpS390XMOVBreg {
break
}
v.reset(OpS390XCMPWconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// match: (CMPconst x:(MOVBZreg _) [c])
// result: (CMPWconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if x.Op != OpS390XMOVBZreg {
break
}
v.reset(OpS390XCMPWconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// cond: int32(m) >= 0 && c >= 0
// result: (CMPWUconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVWZreg {
break
}
if x.Op != OpS390XANDWconst {
break
}
- m := x.AuxInt
+ m := auxIntToInt32(x.AuxInt)
if !(int32(m) >= 0 && c >= 0) {
break
}
v.reset(OpS390XCMPWUconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// cond: int32(m) >= 0 && c >= 0
// result: (CMPWUconst x [c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpS390XMOVWreg {
break
}
if x.Op != OpS390XANDWconst {
break
}
- m := x.AuxInt
+ m := auxIntToInt32(x.AuxInt)
if !(int32(m) >= 0 && c >= 0) {
break
}
v.reset(OpS390XCMPWUconst)
- v.AuxInt = c
+ v.AuxInt = int32ToAuxInt(c)
v.AddArg(x)
return true
}
// cond: c > 0 && n >= 0
// result: (CMPUconst x [n])
for {
- n := v.AuxInt
+ n := auxIntToInt32(v.AuxInt)
x := v_0
if x.Op != OpS390XSRDconst {
break
}
- c := x.AuxInt
+ c := auxIntToInt8(x.AuxInt)
if !(c > 0 && n >= 0) {
break
}
v.reset(OpS390XCMPUconst)
- v.AuxInt = n
+ v.AuxInt = int32ToAuxInt(n)
v.AddArg(x)
return true
}
// cond: ptr.Op != OpSB
// result: (FMOVDloadidx [off] {sym} ptr idx mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XFMOVDloadidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, idx, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (FMOVDstoreidx [off] {sym} ptr idx val mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XFMOVDstoreidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg4(ptr, idx, val, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (FMOVSloadidx [off] {sym} ptr idx mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XFMOVSloadidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, idx, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (FMOVSstoreidx [off] {sym} ptr idx val mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XFMOVSstoreidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg4(ptr, idx, val, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (MOVBZloadidx [off] {sym} ptr idx mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XMOVBZloadidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, idx, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (MOVBloadidx [off] {sym} ptr idx mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XMOVBloadidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, idx, mem)
return true
}
// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
// result: (MOVBstore [off] {sym} ptr x mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpS390XMOVBreg {
break
x := v_1.Args[0]
mem := v_2
v.reset(OpS390XMOVBstore)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, x, mem)
return true
}
// match: (MOVBstore [off] {sym} ptr (MOVBZreg x) mem)
// result: (MOVBstore [off] {sym} ptr x mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpS390XMOVBZreg {
break
x := v_1.Args[0]
mem := v_2
v.reset(OpS390XMOVBstore)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, x, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (MOVBstoreidx [off] {sym} ptr idx val mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XMOVBstoreidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg4(ptr, idx, val, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (MOVDloadidx [off] {sym} ptr idx mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XMOVDloadidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, idx, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (MOVDstoreidx [off] {sym} ptr idx val mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XMOVDstoreidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg4(ptr, idx, val, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (MOVHZloadidx [off] {sym} ptr idx mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XMOVHZloadidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, idx, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (MOVHloadidx [off] {sym} ptr idx mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XMOVHloadidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, idx, mem)
return true
}
// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
// result: (MOVHstore [off] {sym} ptr x mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpS390XMOVHreg {
break
x := v_1.Args[0]
mem := v_2
v.reset(OpS390XMOVHstore)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, x, mem)
return true
}
// match: (MOVHstore [off] {sym} ptr (MOVHZreg x) mem)
// result: (MOVHstore [off] {sym} ptr x mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpS390XMOVHZreg {
break
x := v_1.Args[0]
mem := v_2
v.reset(OpS390XMOVHstore)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, x, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (MOVHstoreidx [off] {sym} ptr idx val mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XMOVHstoreidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg4(ptr, idx, val, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (MOVWZloadidx [off] {sym} ptr idx mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XMOVWZloadidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, idx, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (MOVWloadidx [off] {sym} ptr idx mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XMOVWloadidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, idx, mem)
return true
}
// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
// result: (MOVWstore [off] {sym} ptr x mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpS390XMOVWreg {
break
x := v_1.Args[0]
mem := v_2
v.reset(OpS390XMOVWstore)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, x, mem)
return true
}
// match: (MOVWstore [off] {sym} ptr (MOVWZreg x) mem)
// result: (MOVWstore [off] {sym} ptr x mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
ptr := v_0
if v_1.Op != OpS390XMOVWZreg {
break
x := v_1.Args[0]
mem := v_2
v.reset(OpS390XMOVWstore)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg3(ptr, x, mem)
return true
}
// cond: ptr.Op != OpSB
// result: (MOVWstoreidx [off] {sym} ptr idx val mem)
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpS390XADD {
break
}
continue
}
v.reset(OpS390XMOVWstoreidx)
- v.AuxInt = off
- v.Aux = sym
+ v.AuxInt = int32ToAuxInt(off)
+ v.Aux = symToAux(sym)
v.AddArg4(ptr, idx, val, mem)
return true
}
if v_0.Op != OpS390XMOVDconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt64(v_0.AuxInt)
v.reset(OpS390XMOVDconst)
- v.AuxInt = -c
+ v.AuxInt = int64ToAuxInt(-c)
return true
}
// match: (NEG (ADDconst [c] (NEG x)))
if v_0.Op != OpS390XADDconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt32(v_0.AuxInt)
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpS390XNEG {
break
break
}
v.reset(OpS390XADDconst)
- v.AuxInt = -c
+ v.AuxInt = int32ToAuxInt(-c)
v.AddArg(x)
return true
}
if v_0.Op != OpS390XMOVDconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt64(v_0.AuxInt)
v.reset(OpS390XMOVDconst)
- v.AuxInt = int64(int32(-c))
+ v.AuxInt = int64ToAuxInt(int64(int32(-c)))
return true
}
return false
if v_0.Op != OpS390XMOVDconst {
continue
}
- c := v_0.AuxInt
+ c := auxIntToInt64(v_0.AuxInt)
if v_1.Op != OpS390XMOVDconst {
continue
}
- d := v_1.AuxInt
+ d := auxIntToInt64(v_1.AuxInt)
v.reset(OpS390XMOVDconst)
- v.AuxInt = c | d
+ v.AuxInt = int64ToAuxInt(c | d)
return true
}
break
// cond: int32(c)==0
// result: x
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(int32(c) == 0) {
break
// cond: int32(c)==-1
// result: (MOVDconst [-1])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if !(int32(c) == -1) {
break
}
v.reset(OpS390XMOVDconst)
- v.AuxInt = -1
+ v.AuxInt = int64ToAuxInt(-1)
return true
}
// match: (ORWconst [c] (MOVDconst [d]))
// match: (ORconst [0] x)
// result: x
for {
- if v.AuxInt != 0 {
+ if auxIntToInt64(v.AuxInt) != 0 {
break
}
x := v_0
// match: (ORconst [-1] _)
// result: (MOVDconst [-1])
for {
- if v.AuxInt != -1 {
+ if auxIntToInt64(v.AuxInt) != -1 {
break
}
v.reset(OpS390XMOVDconst)
- v.AuxInt = -1
+ v.AuxInt = int64ToAuxInt(-1)
return true
}
// match: (ORconst [c] (MOVDconst [d]))
// result: (MOVDconst [c|d])
for {
- c := v.AuxInt
+ c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt64(v_0.AuxInt)
v.reset(OpS390XMOVDconst)
- v.AuxInt = c | d
+ v.AuxInt = int64ToAuxInt(c | d)
return true
}
return false
// match: (SRADconst [c] (MOVDconst [d]))
// result: (MOVDconst [d>>uint64(c)])
for {
- c := v.AuxInt
+ c := auxIntToInt8(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt64(v_0.AuxInt)
v.reset(OpS390XMOVDconst)
- v.AuxInt = d >> uint64(c)
+ v.AuxInt = int64ToAuxInt(d >> uint64(c))
return true
}
return false
// match: (SRAWconst [c] (MOVDconst [d]))
// result: (MOVDconst [int64(int32(d))>>uint64(c)])
for {
- c := v.AuxInt
+ c := auxIntToInt8(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt64(v_0.AuxInt)
v.reset(OpS390XMOVDconst)
- v.AuxInt = int64(int32(d)) >> uint64(c)
+ v.AuxInt = int64ToAuxInt(int64(int32(d)) >> uint64(c))
return true
}
return false
break
}
v.reset(OpS390XMOVDconst)
- v.AuxInt = 0
+ v.AuxInt = int64ToAuxInt(0)
return true
}
// match: (SUB <t> x g:(MOVDload [off] {sym} ptr mem))
}
_ = v_2_0.Args[1]
v_2_0_0 := v_2_0.Args[0]
- if v_2_0_0.Op != OpS390XMOVDconst || v_2_0_0.AuxInt != 0 {
+ if v_2_0_0.Op != OpS390XMOVDconst || auxIntToInt64(v_2_0_0.AuxInt) != 0 {
break
}
v_2_0_1 := v_2_0.Args[1]
}
c := v_2_0_1_0_0.Args[2]
v_2_0_1_0_0_0 := v_2_0_1_0_0.Args[0]
- if v_2_0_1_0_0_0.Op != OpS390XMOVDconst || v_2_0_1_0_0_0.AuxInt != 0 {
+ if v_2_0_1_0_0_0.Op != OpS390XMOVDconst || auxIntToInt64(v_2_0_1_0_0_0.AuxInt) != 0 {
break
}
v_2_0_1_0_0_1 := v_2_0_1_0_0.Args[1]
- if v_2_0_1_0_0_1.Op != OpS390XMOVDconst || v_2_0_1_0_0_1.AuxInt != 0 {
+ if v_2_0_1_0_0_1.Op != OpS390XMOVDconst || auxIntToInt64(v_2_0_1_0_0_1.AuxInt) != 0 {
break
}
v.reset(OpS390XSUBE)
break
}
v.reset(OpS390XMOVDconst)
- v.AuxInt = 0
+ v.AuxInt = int64ToAuxInt(0)
return true
}
// match: (SUBW <t> x g:(MOVWload [off] {sym} ptr mem))
// cond: int32(c) == 0
// result: x
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(int32(c) == 0) {
break
// match: (SUBconst [0] x)
// result: x
for {
- if v.AuxInt != 0 {
+ if auxIntToInt32(v.AuxInt) != 0 {
break
}
x := v_0
// cond: c != -(1<<31)
// result: (ADDconst [-c] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(c != -(1 << 31)) {
break
}
v.reset(OpS390XADDconst)
- v.AuxInt = -c
+ v.AuxInt = int32ToAuxInt(-c)
v.AddArg(x)
return true
}
if v_0.Op != OpS390XMOVDconst {
continue
}
- c := v_0.AuxInt
+ c := auxIntToInt64(v_0.AuxInt)
if v_1.Op != OpS390XMOVDconst {
continue
}
- d := v_1.AuxInt
+ d := auxIntToInt64(v_1.AuxInt)
v.reset(OpS390XMOVDconst)
- v.AuxInt = c ^ d
+ v.AuxInt = int64ToAuxInt(c ^ d)
return true
}
break
break
}
v.reset(OpS390XMOVDconst)
- v.AuxInt = 0
+ v.AuxInt = int64ToAuxInt(0)
return true
}
// match: (XOR <t> x g:(MOVDload [off] {sym} ptr mem))
break
}
v.reset(OpS390XMOVDconst)
- v.AuxInt = 0
+ v.AuxInt = int64ToAuxInt(0)
return true
}
// match: (XORW <t> x g:(MOVWload [off] {sym} ptr mem))
// cond: int32(c)==0
// result: x
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(int32(c) == 0) {
break
// match: (XORconst [0] x)
// result: x
for {
- if v.AuxInt != 0 {
+ if auxIntToInt64(v.AuxInt) != 0 {
break
}
x := v_0
// match: (XORconst [c] (MOVDconst [d]))
// result: (MOVDconst [c^d])
for {
- c := v.AuxInt
+ c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpS390XMOVDconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt64(v_0.AuxInt)
v.reset(OpS390XMOVDconst)
- v.AuxInt = c ^ d
+ v.AuxInt = int64ToAuxInt(c ^ d)
return true
}
return false
if v_0_0.Op != OpS390XMOVDconst {
break
}
- c := v_0_0.AuxInt
+ c := auxIntToInt64(v_0_0.AuxInt)
v_0_1 := v_0.Args[1]
if v_0_1.Op != OpS390XMOVDconst {
break
}
- d := v_0_1.AuxInt
+ d := auxIntToInt64(v_0_1.AuxInt)
v.reset(OpS390XMOVDconst)
- v.AuxInt = c - d
+ v.AuxInt = int64ToAuxInt(c - d)
return true
}
// match: (Select0 (FADD (FMUL y z) x))
if v_0_0.Op != OpS390XMOVDconst {
break
}
- c := v_0_0.AuxInt
+ c := auxIntToInt64(v_0_0.AuxInt)
v_0_1 := v_0.Args[1]
if v_0_1.Op != OpS390XMOVDconst {
break
}
- d := v_0_1.AuxInt
+ d := auxIntToInt64(v_0_1.AuxInt)
if !(uint64(d) <= uint64(c) && c-d == 0) {
break
}
if v_0_0.Op != OpS390XMOVDconst {
break
}
- c := v_0_0.AuxInt
+ c := auxIntToInt64(v_0_0.AuxInt)
v_0_1 := v_0.Args[1]
if v_0_1.Op != OpS390XMOVDconst {
break
}
- d := v_0_1.AuxInt
+ d := auxIntToInt64(v_0_1.AuxInt)
if !(uint64(d) <= uint64(c) && c-d != 0) {
break
}