(BICconst [c] _) && int32(c)==-1 => (MOVWconst [0])
// generic constant folding
-(ADDconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(uint32(-c)) -> (SUBconst [int64(int32(-c))] x)
-(SUBconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(uint32(-c)) -> (ADDconst [int64(int32(-c))] x)
-(ANDconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(^uint32(c)) -> (BICconst [int64(int32(^uint32(c)))] x)
-(BICconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(^uint32(c)) -> (ANDconst [int64(int32(^uint32(c)))] x)
-(ADDconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && uint32(-c)<=0xffff -> (SUBconst [int64(int32(-c))] x)
-(SUBconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && uint32(-c)<=0xffff -> (ANDconst [int64(int32(-c))] x)
-(ANDconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && ^uint32(c)<=0xffff -> (BICconst [int64(int32(^uint32(c)))] x)
-(BICconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && ^uint32(c)<=0xffff -> (ANDconst [int64(int32(^uint32(c)))] x)
-(ADDconst [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(c+d))])
-(ADDconst [c] (ADDconst [d] x)) -> (ADDconst [int64(int32(c+d))] x)
-(ADDconst [c] (SUBconst [d] x)) -> (ADDconst [int64(int32(c-d))] x)
-(ADDconst [c] (RSBconst [d] x)) -> (RSBconst [int64(int32(c+d))] x)
-(ADCconst [c] (ADDconst [d] x) flags) -> (ADCconst [int64(int32(c+d))] x flags)
-(ADCconst [c] (SUBconst [d] x) flags) -> (ADCconst [int64(int32(c-d))] x flags)
-(SUBconst [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(d-c))])
-(SUBconst [c] (SUBconst [d] x)) -> (ADDconst [int64(int32(-c-d))] x)
-(SUBconst [c] (ADDconst [d] x)) -> (ADDconst [int64(int32(-c+d))] x)
-(SUBconst [c] (RSBconst [d] x)) -> (RSBconst [int64(int32(-c+d))] x)
-(SBCconst [c] (ADDconst [d] x) flags) -> (SBCconst [int64(int32(c-d))] x flags)
-(SBCconst [c] (SUBconst [d] x) flags) -> (SBCconst [int64(int32(c+d))] x flags)
-(RSBconst [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(c-d))])
-(RSBconst [c] (RSBconst [d] x)) -> (ADDconst [int64(int32(c-d))] x)
-(RSBconst [c] (ADDconst [d] x)) -> (RSBconst [int64(int32(c-d))] x)
-(RSBconst [c] (SUBconst [d] x)) -> (RSBconst [int64(int32(c+d))] x)
-(RSCconst [c] (ADDconst [d] x) flags) -> (RSCconst [int64(int32(c-d))] x flags)
-(RSCconst [c] (SUBconst [d] x) flags) -> (RSCconst [int64(int32(c+d))] x flags)
-(SLLconst [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(uint32(d)<<uint64(c)))])
-(SRLconst [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(uint32(d)>>uint64(c)))])
-(SRAconst [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(d)>>uint64(c))])
-(MUL (MOVWconst [c]) (MOVWconst [d])) -> (MOVWconst [int64(int32(c*d))])
-(MULA (MOVWconst [c]) (MOVWconst [d]) a) -> (ADDconst [int64(int32(c*d))] a)
-(MULS (MOVWconst [c]) (MOVWconst [d]) a) -> (SUBconst [int64(int32(c*d))] a)
-(Select0 (CALLudiv (MOVWconst [c]) (MOVWconst [d]))) -> (MOVWconst [int64(int32(uint32(c)/uint32(d)))])
-(Select1 (CALLudiv (MOVWconst [c]) (MOVWconst [d]))) -> (MOVWconst [int64(int32(uint32(c)%uint32(d)))])
+(ADDconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(uint32(-c)) => (SUBconst [-c] x)
+(SUBconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(uint32(-c)) => (ADDconst [-c] x)
+(ANDconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(^uint32(c)) => (BICconst [int32(^uint32(c))] x)
+(BICconst [c] x) && !isARMImmRot(uint32(c)) && isARMImmRot(^uint32(c)) => (ANDconst [int32(^uint32(c))] x)
+(ADDconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && uint32(-c)<=0xffff => (SUBconst [-c] x)
+(SUBconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && uint32(-c)<=0xffff => (ADDconst [-c] x)
+(ANDconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && ^uint32(c)<=0xffff => (BICconst [int32(^uint32(c))] x)
+(BICconst [c] x) && objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && ^uint32(c)<=0xffff => (ANDconst [int32(^uint32(c))] x)
+(ADDconst [c] (MOVWconst [d])) => (MOVWconst [c+d])
+(ADDconst [c] (ADDconst [d] x)) => (ADDconst [c+d] x)
+(ADDconst [c] (SUBconst [d] x)) => (ADDconst [c-d] x)
+(ADDconst [c] (RSBconst [d] x)) => (RSBconst [c+d] x)
+(ADCconst [c] (ADDconst [d] x) flags) => (ADCconst [c+d] x flags)
+(ADCconst [c] (SUBconst [d] x) flags) => (ADCconst [c-d] x flags)
+(SUBconst [c] (MOVWconst [d])) => (MOVWconst [d-c])
+(SUBconst [c] (SUBconst [d] x)) => (ADDconst [-c-d] x)
+(SUBconst [c] (ADDconst [d] x)) => (ADDconst [-c+d] x)
+(SUBconst [c] (RSBconst [d] x)) => (RSBconst [-c+d] x)
+(SBCconst [c] (ADDconst [d] x) flags) => (SBCconst [c-d] x flags)
+(SBCconst [c] (SUBconst [d] x) flags) => (SBCconst [c+d] x flags)
+(RSBconst [c] (MOVWconst [d])) => (MOVWconst [c-d])
+(RSBconst [c] (RSBconst [d] x)) => (ADDconst [c-d] x)
+(RSBconst [c] (ADDconst [d] x)) => (RSBconst [c-d] x)
+(RSBconst [c] (SUBconst [d] x)) => (RSBconst [c+d] x)
+(RSCconst [c] (ADDconst [d] x) flags) => (RSCconst [c-d] x flags)
+(RSCconst [c] (SUBconst [d] x) flags) => (RSCconst [c+d] x flags)
+(SLLconst [c] (MOVWconst [d])) => (MOVWconst [d<<uint64(c)])
+(SRLconst [c] (MOVWconst [d])) => (MOVWconst [int32(uint32(d)>>uint64(c))])
+(SRAconst [c] (MOVWconst [d])) => (MOVWconst [d>>uint64(c)])
+(MUL (MOVWconst [c]) (MOVWconst [d])) => (MOVWconst [c*d])
+(MULA (MOVWconst [c]) (MOVWconst [d]) a) => (ADDconst [c*d] a)
+(MULS (MOVWconst [c]) (MOVWconst [d]) a) => (SUBconst [c*d] a)
+(Select0 (CALLudiv (MOVWconst [c]) (MOVWconst [d]))) => (MOVWconst [int32(uint32(c)/uint32(d))])
+(Select1 (CALLudiv (MOVWconst [c]) (MOVWconst [d]))) => (MOVWconst [int32(uint32(c)%uint32(d))])
(ANDconst [c] (MOVWconst [d])) => (MOVWconst [c&d])
(ANDconst [c] (ANDconst [d] x)) => (ANDconst [c&d] x)
(ORconst [c] (MOVWconst [d])) => (MOVWconst [c|d])
(XORconst [c] (MOVWconst [d])) => (MOVWconst [c^d])
(XORconst [c] (XORconst [d] x)) => (XORconst [c^d] x)
(BICconst [c] (MOVWconst [d])) => (MOVWconst [d&^c])
-(BICconst [c] (BICconst [d] x)) -> (BICconst [int64(int32(c|d))] x)
+(BICconst [c] (BICconst [d] x)) => (BICconst [c|d] x)
(MVN (MOVWconst [c])) => (MOVWconst [^c])
(MOVBreg (MOVWconst [c])) -> (MOVWconst [int64(int8(c))])
(MOVBUreg (MOVWconst [c])) -> (MOVWconst [int64(uint8(c))])
(MOVHUreg (MOVWconst [c])) -> (MOVWconst [int64(uint16(c))])
(MOVWreg (MOVWconst [c])) => (MOVWconst [c])
// BFX: Width = c >> 8, LSB = c & 0xff, result = d << (32 - Width - LSB) >> (32 - Width)
-(BFX [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(d)<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8)))])
-(BFXU [c] (MOVWconst [d])) -> (MOVWconst [int64(int32(uint32(d)<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8))))])
+(BFX [c] (MOVWconst [d])) => (MOVWconst [d<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8))])
+(BFXU [c] (MOVWconst [d])) => (MOVWconst [int32(uint32(d)<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8)))])
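// Worked example (illustrative only, not a rule): Width=8, LSB=4 encode as c = 8<<8|4 = 0x804.
// For d = 0x12345F78, d<<(32-4-8)>>(32-8) isolates bits 4..11 (0xF7) and sign-extends them,
// so BFX folds to (MOVWconst [-9]); BFXU performs the same shifts on uint32(d) and folds to
// (MOVWconst [0xF7]).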
// absorb shifts into ops
(ADD x (SLLconst [c] y)) => (ADDshiftLL x y [c])
(CMNshiftRAreg (MOVWconst [c]) x y) => (CMNconst [c] (SRA <x.Type> x y))
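// Note (illustrative): the *shift ops model ARM's shifted second operand, so
// (ADDshiftLL x y [c]) computes x + (y<<c) in a single data-processing instruction;
// absorbing a constant SLLconst/SRLconst/SRAconst into the op is therefore free.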
// constant folding in *shift ops
-(ADDshiftLL x (MOVWconst [c]) [d]) -> (ADDconst x [int64(int32(uint32(c)<<uint64(d)))])
-(ADDshiftRL x (MOVWconst [c]) [d]) -> (ADDconst x [int64(int32(uint32(c)>>uint64(d)))])
-(ADDshiftRA x (MOVWconst [c]) [d]) -> (ADDconst x [int64(int32(c)>>uint64(d))])
-(ADCshiftLL x (MOVWconst [c]) [d] flags) -> (ADCconst x [int64(int32(uint32(c)<<uint64(d)))] flags)
-(ADCshiftRL x (MOVWconst [c]) [d] flags) -> (ADCconst x [int64(int32(uint32(c)>>uint64(d)))] flags)
-(ADCshiftRA x (MOVWconst [c]) [d] flags) -> (ADCconst x [int64(int32(c)>>uint64(d))] flags)
-(ADDSshiftLL x (MOVWconst [c]) [d]) -> (ADDSconst x [int64(int32(uint32(c)<<uint64(d)))])
-(ADDSshiftRL x (MOVWconst [c]) [d]) -> (ADDSconst x [int64(int32(uint32(c)>>uint64(d)))])
-(ADDSshiftRA x (MOVWconst [c]) [d]) -> (ADDSconst x [int64(int32(c)>>uint64(d))])
-(SUBshiftLL x (MOVWconst [c]) [d]) -> (SUBconst x [int64(int32(uint32(c)<<uint64(d)))])
-(SUBshiftRL x (MOVWconst [c]) [d]) -> (SUBconst x [int64(int32(uint32(c)>>uint64(d)))])
-(SUBshiftRA x (MOVWconst [c]) [d]) -> (SUBconst x [int64(int32(c)>>uint64(d))])
-(SBCshiftLL x (MOVWconst [c]) [d] flags) -> (SBCconst x [int64(int32(uint32(c)<<uint64(d)))] flags)
-(SBCshiftRL x (MOVWconst [c]) [d] flags) -> (SBCconst x [int64(int32(uint32(c)>>uint64(d)))] flags)
-(SBCshiftRA x (MOVWconst [c]) [d] flags) -> (SBCconst x [int64(int32(c)>>uint64(d))] flags)
-(SUBSshiftLL x (MOVWconst [c]) [d]) -> (SUBSconst x [int64(int32(uint32(c)<<uint64(d)))])
-(SUBSshiftRL x (MOVWconst [c]) [d]) -> (SUBSconst x [int64(int32(uint32(c)>>uint64(d)))])
-(SUBSshiftRA x (MOVWconst [c]) [d]) -> (SUBSconst x [int64(int32(c)>>uint64(d))])
-(RSBshiftLL x (MOVWconst [c]) [d]) -> (RSBconst x [int64(int32(uint32(c)<<uint64(d)))])
-(RSBshiftRL x (MOVWconst [c]) [d]) -> (RSBconst x [int64(int32(uint32(c)>>uint64(d)))])
-(RSBshiftRA x (MOVWconst [c]) [d]) -> (RSBconst x [int64(int32(c)>>uint64(d))])
-(RSCshiftLL x (MOVWconst [c]) [d] flags) -> (RSCconst x [int64(int32(uint32(c)<<uint64(d)))] flags)
-(RSCshiftRL x (MOVWconst [c]) [d] flags) -> (RSCconst x [int64(int32(uint32(c)>>uint64(d)))] flags)
-(RSCshiftRA x (MOVWconst [c]) [d] flags) -> (RSCconst x [int64(int32(c)>>uint64(d))] flags)
-(RSBSshiftLL x (MOVWconst [c]) [d]) -> (RSBSconst x [int64(int32(uint32(c)<<uint64(d)))])
-(RSBSshiftRL x (MOVWconst [c]) [d]) -> (RSBSconst x [int64(int32(uint32(c)>>uint64(d)))])
-(RSBSshiftRA x (MOVWconst [c]) [d]) -> (RSBSconst x [int64(int32(c)>>uint64(d))])
-(ANDshiftLL x (MOVWconst [c]) [d]) -> (ANDconst x [int64(int32(uint32(c)<<uint64(d)))])
-(ANDshiftRL x (MOVWconst [c]) [d]) -> (ANDconst x [int64(int32(uint32(c)>>uint64(d)))])
-(ANDshiftRA x (MOVWconst [c]) [d]) -> (ANDconst x [int64(int32(c)>>uint64(d))])
-(ORshiftLL x (MOVWconst [c]) [d]) -> (ORconst x [int64(int32(uint32(c)<<uint64(d)))])
-(ORshiftRL x (MOVWconst [c]) [d]) -> (ORconst x [int64(int32(uint32(c)>>uint64(d)))])
-(ORshiftRA x (MOVWconst [c]) [d]) -> (ORconst x [int64(int32(c)>>uint64(d))])
-(XORshiftLL x (MOVWconst [c]) [d]) -> (XORconst x [int64(int32(uint32(c)<<uint64(d)))])
-(XORshiftRL x (MOVWconst [c]) [d]) -> (XORconst x [int64(int32(uint32(c)>>uint64(d)))])
-(XORshiftRA x (MOVWconst [c]) [d]) -> (XORconst x [int64(int32(c)>>uint64(d))])
-(XORshiftRR x (MOVWconst [c]) [d]) -> (XORconst x [int64(int32(uint32(c)>>uint64(d)|uint32(c)<<uint64(32-d)))])
-(BICshiftLL x (MOVWconst [c]) [d]) -> (BICconst x [int64(int32(uint32(c)<<uint64(d)))])
-(BICshiftRL x (MOVWconst [c]) [d]) -> (BICconst x [int64(int32(uint32(c)>>uint64(d)))])
-(BICshiftRA x (MOVWconst [c]) [d]) -> (BICconst x [int64(int32(c)>>uint64(d))])
-(MVNshiftLL (MOVWconst [c]) [d]) -> (MOVWconst [^int64(uint32(c)<<uint64(d))])
+(ADDshiftLL x (MOVWconst [c]) [d]) => (ADDconst x [c<<uint64(d)])
+(ADDshiftRL x (MOVWconst [c]) [d]) => (ADDconst x [int32(uint32(c)>>uint64(d))])
+(ADDshiftRA x (MOVWconst [c]) [d]) => (ADDconst x [c>>uint64(d)])
+(ADCshiftLL x (MOVWconst [c]) [d] flags) => (ADCconst x [c<<uint64(d)] flags)
+(ADCshiftRL x (MOVWconst [c]) [d] flags) => (ADCconst x [int32(uint32(c)>>uint64(d))] flags)
+(ADCshiftRA x (MOVWconst [c]) [d] flags) => (ADCconst x [c>>uint64(d)] flags)
+(ADDSshiftLL x (MOVWconst [c]) [d]) => (ADDSconst x [c<<uint64(d)])
+(ADDSshiftRL x (MOVWconst [c]) [d]) => (ADDSconst x [int32(uint32(c)>>uint64(d))])
+(ADDSshiftRA x (MOVWconst [c]) [d]) => (ADDSconst x [c>>uint64(d)])
+(SUBshiftLL x (MOVWconst [c]) [d]) => (SUBconst x [c<<uint64(d)])
+(SUBshiftRL x (MOVWconst [c]) [d]) => (SUBconst x [int32(uint32(c)>>uint64(d))])
+(SUBshiftRA x (MOVWconst [c]) [d]) => (SUBconst x [c>>uint64(d)])
+(SBCshiftLL x (MOVWconst [c]) [d] flags) => (SBCconst x [c<<uint64(d)] flags)
+(SBCshiftRL x (MOVWconst [c]) [d] flags) => (SBCconst x [int32(uint32(c)>>uint64(d))] flags)
+(SBCshiftRA x (MOVWconst [c]) [d] flags) => (SBCconst x [c>>uint64(d)] flags)
+(SUBSshiftLL x (MOVWconst [c]) [d]) => (SUBSconst x [c<<uint64(d)])
+(SUBSshiftRL x (MOVWconst [c]) [d]) => (SUBSconst x [int32(uint32(c)>>uint64(d))])
+(SUBSshiftRA x (MOVWconst [c]) [d]) => (SUBSconst x [c>>uint64(d)])
+(RSBshiftLL x (MOVWconst [c]) [d]) => (RSBconst x [c<<uint64(d)])
+(RSBshiftRL x (MOVWconst [c]) [d]) => (RSBconst x [int32(uint32(c)>>uint64(d))])
+(RSBshiftRA x (MOVWconst [c]) [d]) => (RSBconst x [c>>uint64(d)])
+(RSCshiftLL x (MOVWconst [c]) [d] flags) => (RSCconst x [c<<uint64(d)] flags)
+(RSCshiftRL x (MOVWconst [c]) [d] flags) => (RSCconst x [int32(uint32(c)>>uint64(d))] flags)
+(RSCshiftRA x (MOVWconst [c]) [d] flags) => (RSCconst x [c>>uint64(d)] flags)
+(RSBSshiftLL x (MOVWconst [c]) [d]) => (RSBSconst x [c<<uint64(d)])
+(RSBSshiftRL x (MOVWconst [c]) [d]) => (RSBSconst x [int32(uint32(c)>>uint64(d))])
+(RSBSshiftRA x (MOVWconst [c]) [d]) => (RSBSconst x [c>>uint64(d)])
+(ANDshiftLL x (MOVWconst [c]) [d]) => (ANDconst x [c<<uint64(d)])
+(ANDshiftRL x (MOVWconst [c]) [d]) => (ANDconst x [int32(uint32(c)>>uint64(d))])
+(ANDshiftRA x (MOVWconst [c]) [d]) => (ANDconst x [c>>uint64(d)])
+(ORshiftLL x (MOVWconst [c]) [d]) => (ORconst x [c<<uint64(d)])
+(ORshiftRL x (MOVWconst [c]) [d]) => (ORconst x [int32(uint32(c)>>uint64(d))])
+(ORshiftRA x (MOVWconst [c]) [d]) => (ORconst x [c>>uint64(d)])
+(XORshiftLL x (MOVWconst [c]) [d]) => (XORconst x [c<<uint64(d)])
+(XORshiftRL x (MOVWconst [c]) [d]) => (XORconst x [int32(uint32(c)>>uint64(d))])
+(XORshiftRA x (MOVWconst [c]) [d]) => (XORconst x [c>>uint64(d)])
+(XORshiftRR x (MOVWconst [c]) [d]) => (XORconst x [int32(uint32(c)>>uint64(d)|uint32(c)<<uint64(32-d))])
+(BICshiftLL x (MOVWconst [c]) [d]) => (BICconst x [c<<uint64(d)])
+(BICshiftRL x (MOVWconst [c]) [d]) => (BICconst x [int32(uint32(c)>>uint64(d))])
+(BICshiftRA x (MOVWconst [c]) [d]) => (BICconst x [c>>uint64(d)])
+(MVNshiftLL (MOVWconst [c]) [d]) => (MOVWconst [^(c<<uint64(d))])
(MVNshiftRL (MOVWconst [c]) [d]) -> (MOVWconst [^int64(uint32(c)>>uint64(d))])
(MVNshiftRA (MOVWconst [c]) [d]) -> (MOVWconst [^int64(int32(c)>>uint64(d))])
-(CMPshiftLL x (MOVWconst [c]) [d]) -> (CMPconst x [int64(int32(uint32(c)<<uint64(d)))])
-(CMPshiftRL x (MOVWconst [c]) [d]) -> (CMPconst x [int64(int32(uint32(c)>>uint64(d)))])
-(CMPshiftRA x (MOVWconst [c]) [d]) -> (CMPconst x [int64(int32(c)>>uint64(d))])
-(TSTshiftLL x (MOVWconst [c]) [d]) -> (TSTconst x [int64(int32(uint32(c)<<uint64(d)))])
-(TSTshiftRL x (MOVWconst [c]) [d]) -> (TSTconst x [int64(int32(uint32(c)>>uint64(d)))])
-(TSTshiftRA x (MOVWconst [c]) [d]) -> (TSTconst x [int64(int32(c)>>uint64(d))])
-(TEQshiftLL x (MOVWconst [c]) [d]) -> (TEQconst x [int64(int32(uint32(c)<<uint64(d)))])
-(TEQshiftRL x (MOVWconst [c]) [d]) -> (TEQconst x [int64(int32(uint32(c)>>uint64(d)))])
-(TEQshiftRA x (MOVWconst [c]) [d]) -> (TEQconst x [int64(int32(c)>>uint64(d))])
-(CMNshiftLL x (MOVWconst [c]) [d]) -> (CMNconst x [int64(int32(uint32(c)<<uint64(d)))])
-(CMNshiftRL x (MOVWconst [c]) [d]) -> (CMNconst x [int64(int32(uint32(c)>>uint64(d)))])
-(CMNshiftRA x (MOVWconst [c]) [d]) -> (CMNconst x [int64(int32(c)>>uint64(d))])
+(CMPshiftLL x (MOVWconst [c]) [d]) => (CMPconst x [c<<uint64(d)])
+(CMPshiftRL x (MOVWconst [c]) [d]) => (CMPconst x [int32(uint32(c)>>uint64(d))])
+(CMPshiftRA x (MOVWconst [c]) [d]) => (CMPconst x [c>>uint64(d)])
+(TSTshiftLL x (MOVWconst [c]) [d]) => (TSTconst x [c<<uint64(d)])
+(TSTshiftRL x (MOVWconst [c]) [d]) => (TSTconst x [int32(uint32(c)>>uint64(d))])
+(TSTshiftRA x (MOVWconst [c]) [d]) => (TSTconst x [c>>uint64(d)])
+(TEQshiftLL x (MOVWconst [c]) [d]) => (TEQconst x [c<<uint64(d)])
+(TEQshiftRL x (MOVWconst [c]) [d]) => (TEQconst x [int32(uint32(c)>>uint64(d))])
+(TEQshiftRA x (MOVWconst [c]) [d]) => (TEQconst x [c>>uint64(d)])
+(CMNshiftLL x (MOVWconst [c]) [d]) => (CMNconst x [c<<uint64(d)])
+(CMNshiftRL x (MOVWconst [c]) [d]) => (CMNconst x [int32(uint32(c)>>uint64(d))])
+(CMNshiftRA x (MOVWconst [c]) [d]) => (CMNconst x [c>>uint64(d)])
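// Note (illustrative): in the folds above, the LL and RA variants shift the int32 aux
// value directly, while the RL variants go through uint32 so the shift is logical and
// the result is truncated back to int32, which is the same value the old
// int64(int32(...)) spelling produced.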
(ADDshiftLLreg x y (MOVWconst [c])) => (ADDshiftLL x y [c])
(ADDshiftRLreg x y (MOVWconst [c])) => (ADDshiftRL x y [c])
(MOVWloadshiftLL ptr (MOVWconst [c]) [d] mem) -> (MOVWload [int64(uint32(c)<<uint64(d))] ptr mem)
(MOVWloadshiftRL ptr (MOVWconst [c]) [d] mem) -> (MOVWload [int64(uint32(c)>>uint64(d))] ptr mem)
-(MOVWloadshiftRA ptr (MOVWconst [c]) [d] mem) -> (MOVWload [int64(int32(c)>>uint64(d))] ptr mem)
+(MOVWloadshiftRA ptr (MOVWconst [c]) [d] mem) => (MOVWload [c>>uint64(d)] ptr mem)
(MOVWstoreshiftLL ptr (MOVWconst [c]) [d] val mem) -> (MOVWstore [int64(uint32(c)<<uint64(d))] ptr val mem)
(MOVWstoreshiftRL ptr (MOVWconst [c]) [d] val mem) -> (MOVWstore [int64(uint32(c)>>uint64(d))] ptr val mem)
-(MOVWstoreshiftRA ptr (MOVWconst [c]) [d] val mem) -> (MOVWstore [int64(int32(c)>>uint64(d))] ptr val mem)
+(MOVWstoreshiftRA ptr (MOVWconst [c]) [d] val mem) => (MOVWstore [c>>uint64(d)] ptr val mem)
// generic simplifications
(ADD x (RSBconst [0] y)) => (SUB x y)
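The hunks below are from the regenerated rewriteARM.go. The mechanical change is that AuxInt values are now read and written through typed helpers instead of raw int64 casts; a minimal sketch of their shape (the real definitions live in cmd/compile/internal/ssa/rewrite.go) is:

	func auxIntToInt32(i int64) int32 { return int32(i) }
	func int32ToAuxInt(i int32) int64 { return int64(i) }

With these, the old pattern c := v.AuxInt; v.AuxInt = int64(int32(c+d)) becomes c := auxIntToInt32(v.AuxInt); v.AuxInt = int32ToAuxInt(c+d), which stores the same sign-extended 32-bit value.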
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (ADCconst [c] (ADDconst [d] x) flags)
- // result: (ADCconst [int64(int32(c+d))] x flags)
+ // result: (ADCconst [c+d] x flags)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMADDconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
flags := v_1
v.reset(OpARMADCconst)
- v.AuxInt = int64(int32(c + d))
+ v.AuxInt = int32ToAuxInt(c + d)
v.AddArg2(x, flags)
return true
}
// match: (ADCconst [c] (SUBconst [d] x) flags)
- // result: (ADCconst [int64(int32(c-d))] x flags)
+ // result: (ADCconst [c-d] x flags)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMSUBconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
flags := v_1
v.reset(OpARMADCconst)
- v.AuxInt = int64(int32(c - d))
+ v.AuxInt = int32ToAuxInt(c - d)
v.AddArg2(x, flags)
return true
}
return true
}
// match: (ADCshiftLL x (MOVWconst [c]) [d] flags)
- // result: (ADCconst x [int64(int32(uint32(c)<<uint64(d)))] flags)
+ // result: (ADCconst x [c<<uint64(d)] flags)
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
flags := v_2
v.reset(OpARMADCconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg2(x, flags)
return true
}
return true
}
// match: (ADCshiftRA x (MOVWconst [c]) [d] flags)
- // result: (ADCconst x [int64(int32(c)>>uint64(d))] flags)
+ // result: (ADCconst x [c>>uint64(d)] flags)
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
flags := v_2
v.reset(OpARMADCconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg2(x, flags)
return true
}
return true
}
// match: (ADCshiftRL x (MOVWconst [c]) [d] flags)
- // result: (ADCconst x [int64(int32(uint32(c)>>uint64(d)))] flags)
+ // result: (ADCconst x [int32(uint32(c)>>uint64(d))] flags)
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
flags := v_2
v.reset(OpARMADCconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg2(x, flags)
return true
}
return true
}
// match: (ADDSshiftLL x (MOVWconst [c]) [d])
- // result: (ADDSconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (ADDSconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMADDSconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (ADDSshiftRA x (MOVWconst [c]) [d])
- // result: (ADDSconst x [int64(int32(c)>>uint64(d))])
+ // result: (ADDSconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMADDSconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (ADDSshiftRL x (MOVWconst [c]) [d])
- // result: (ADDSconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (ADDSconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMADDSconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
}
// match: (ADDconst [c] x)
// cond: !isARMImmRot(uint32(c)) && isARMImmRot(uint32(-c))
- // result: (SUBconst [int64(int32(-c))] x)
+ // result: (SUBconst [-c] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(!isARMImmRot(uint32(c)) && isARMImmRot(uint32(-c))) {
break
}
v.reset(OpARMSUBconst)
- v.AuxInt = int64(int32(-c))
+ v.AuxInt = int32ToAuxInt(-c)
v.AddArg(x)
return true
}
// match: (ADDconst [c] x)
// cond: objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && uint32(-c)<=0xffff
- // result: (SUBconst [int64(int32(-c))] x)
+ // result: (SUBconst [-c] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(objabi.GOARM == 7 && !isARMImmRot(uint32(c)) && uint32(c) > 0xffff && uint32(-c) <= 0xffff) {
break
}
v.reset(OpARMSUBconst)
- v.AuxInt = int64(int32(-c))
+ v.AuxInt = int32ToAuxInt(-c)
v.AddArg(x)
return true
}
// match: (ADDconst [c] (MOVWconst [d]))
- // result: (MOVWconst [int64(int32(c+d))])
+ // result: (MOVWconst [c+d])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMMOVWconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int32(c + d))
+ v.AuxInt = int32ToAuxInt(c + d)
return true
}
// match: (ADDconst [c] (ADDconst [d] x))
- // result: (ADDconst [int64(int32(c+d))] x)
+ // result: (ADDconst [c+d] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMADDconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARMADDconst)
- v.AuxInt = int64(int32(c + d))
+ v.AuxInt = int32ToAuxInt(c + d)
v.AddArg(x)
return true
}
// match: (ADDconst [c] (SUBconst [d] x))
- // result: (ADDconst [int64(int32(c-d))] x)
+ // result: (ADDconst [c-d] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMSUBconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARMADDconst)
- v.AuxInt = int64(int32(c - d))
+ v.AuxInt = int32ToAuxInt(c - d)
v.AddArg(x)
return true
}
// match: (ADDconst [c] (RSBconst [d] x))
- // result: (RSBconst [int64(int32(c+d))] x)
+ // result: (RSBconst [c+d] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMRSBconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARMRSBconst)
- v.AuxInt = int64(int32(c + d))
+ v.AuxInt = int32ToAuxInt(c + d)
v.AddArg(x)
return true
}
return true
}
// match: (ADDshiftLL x (MOVWconst [c]) [d])
- // result: (ADDconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (ADDconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMADDconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (ADDshiftRA x (MOVWconst [c]) [d])
- // result: (ADDconst x [int64(int32(c)>>uint64(d))])
+ // result: (ADDconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMADDconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (ADDshiftRL x (MOVWconst [c]) [d])
- // result: (ADDconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (ADDconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMADDconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
}
// match: (ANDconst [c] x)
// cond: !isARMImmRot(uint32(c)) && isARMImmRot(^uint32(c))
- // result: (BICconst [int64(int32(^uint32(c)))] x)
+ // result: (BICconst [int32(^uint32(c))] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(!isARMImmRot(uint32(c)) && isARMImmRot(^uint32(c))) {
break
}
v.reset(OpARMBICconst)
- v.AuxInt = int64(int32(^uint32(c)))
+ v.AuxInt = int32ToAuxInt(int32(^uint32(c)))
v.AddArg(x)
return true
}
// match: (ANDconst [c] x)
// cond: objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && ^uint32(c)<=0xffff
- // result: (BICconst [int64(int32(^uint32(c)))] x)
+ // result: (BICconst [int32(^uint32(c))] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(objabi.GOARM == 7 && !isARMImmRot(uint32(c)) && uint32(c) > 0xffff && ^uint32(c) <= 0xffff) {
break
}
v.reset(OpARMBICconst)
- v.AuxInt = int64(int32(^uint32(c)))
+ v.AuxInt = int32ToAuxInt(int32(^uint32(c)))
v.AddArg(x)
return true
}
return true
}
// match: (ANDshiftLL x (MOVWconst [c]) [d])
- // result: (ANDconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (ANDconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMANDconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (ANDshiftRA x (MOVWconst [c]) [d])
- // result: (ANDconst x [int64(int32(c)>>uint64(d))])
+ // result: (ANDconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMANDconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (ANDshiftRL x (MOVWconst [c]) [d])
- // result: (ANDconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (ANDconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMANDconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
func rewriteValueARM_OpARMBFX(v *Value) bool {
v_0 := v.Args[0]
// match: (BFX [c] (MOVWconst [d]))
- // result: (MOVWconst [int64(int32(d)<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8)))])
+ // result: (MOVWconst [d<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8))])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMMOVWconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int32(d) << (32 - uint32(c&0xff) - uint32(c>>8)) >> (32 - uint32(c>>8)))
+ v.AuxInt = int32ToAuxInt(d << (32 - uint32(c&0xff) - uint32(c>>8)) >> (32 - uint32(c>>8)))
return true
}
return false
func rewriteValueARM_OpARMBFXU(v *Value) bool {
v_0 := v.Args[0]
// match: (BFXU [c] (MOVWconst [d]))
- // result: (MOVWconst [int64(int32(uint32(d)<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8))))])
+ // result: (MOVWconst [int32(uint32(d)<<(32-uint32(c&0xff)-uint32(c>>8))>>(32-uint32(c>>8)))])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMMOVWconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int32(uint32(d) << (32 - uint32(c&0xff) - uint32(c>>8)) >> (32 - uint32(c>>8))))
+ v.AuxInt = int32ToAuxInt(int32(uint32(d) << (32 - uint32(c&0xff) - uint32(c>>8)) >> (32 - uint32(c>>8))))
return true
}
return false
}
// match: (BICconst [c] x)
// cond: !isARMImmRot(uint32(c)) && isARMImmRot(^uint32(c))
- // result: (ANDconst [int64(int32(^uint32(c)))] x)
+ // result: (ANDconst [int32(^uint32(c))] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(!isARMImmRot(uint32(c)) && isARMImmRot(^uint32(c))) {
break
}
v.reset(OpARMANDconst)
- v.AuxInt = int64(int32(^uint32(c)))
+ v.AuxInt = int32ToAuxInt(int32(^uint32(c)))
v.AddArg(x)
return true
}
// match: (BICconst [c] x)
// cond: objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && ^uint32(c)<=0xffff
- // result: (ANDconst [int64(int32(^uint32(c)))] x)
+ // result: (ANDconst [int32(^uint32(c))] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(objabi.GOARM == 7 && !isARMImmRot(uint32(c)) && uint32(c) > 0xffff && ^uint32(c) <= 0xffff) {
break
}
v.reset(OpARMANDconst)
- v.AuxInt = int64(int32(^uint32(c)))
+ v.AuxInt = int32ToAuxInt(int32(^uint32(c)))
v.AddArg(x)
return true
}
return true
}
// match: (BICconst [c] (BICconst [d] x))
- // result: (BICconst [int64(int32(c|d))] x)
+ // result: (BICconst [c|d] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMBICconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARMBICconst)
- v.AuxInt = int64(int32(c | d))
+ v.AuxInt = int32ToAuxInt(c | d)
v.AddArg(x)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (BICshiftLL x (MOVWconst [c]) [d])
- // result: (BICconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (BICconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMBICconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (BICshiftRA x (MOVWconst [c]) [d])
- // result: (BICconst x [int64(int32(c)>>uint64(d))])
+ // result: (BICconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMBICconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (BICshiftRL x (MOVWconst [c]) [d])
- // result: (BICconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (BICconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMBICconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
return true
}
// match: (CMNshiftLL x (MOVWconst [c]) [d])
- // result: (CMNconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (CMNconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMCMNconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (CMNshiftRA x (MOVWconst [c]) [d])
- // result: (CMNconst x [int64(int32(c)>>uint64(d))])
+ // result: (CMNconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMCMNconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (CMNshiftRL x (MOVWconst [c]) [d])
- // result: (CMNconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (CMNconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMCMNconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
return true
}
// match: (CMPshiftLL x (MOVWconst [c]) [d])
- // result: (CMPconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (CMPconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMCMPconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (CMPshiftRA x (MOVWconst [c]) [d])
- // result: (CMPconst x [int64(int32(c)>>uint64(d))])
+ // result: (CMPconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMCMPconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (CMPshiftRL x (MOVWconst [c]) [d])
- // result: (CMPconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (CMPconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMCMPconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
return true
}
// match: (MOVWloadshiftRA ptr (MOVWconst [c]) [d] mem)
- // result: (MOVWload [int64(int32(c)>>uint64(d))] ptr mem)
+ // result: (MOVWload [c>>uint64(d)] ptr mem)
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
ptr := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
mem := v_2
v.reset(OpARMMOVWload)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg2(ptr, mem)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (MOVWstoreshiftRA ptr (MOVWconst [c]) [d] val mem)
- // result: (MOVWstore [int64(int32(c)>>uint64(d))] ptr val mem)
+ // result: (MOVWstore [c>>uint64(d)] ptr val mem)
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
ptr := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
val := v_2
mem := v_3
v.reset(OpARMMOVWstore)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg3(ptr, val, mem)
return true
}
break
}
// match: (MUL (MOVWconst [c]) (MOVWconst [d]))
- // result: (MOVWconst [int64(int32(c*d))])
+ // result: (MOVWconst [c*d])
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpARMMOVWconst {
continue
}
- c := v_0.AuxInt
+ c := auxIntToInt32(v_0.AuxInt)
if v_1.Op != OpARMMOVWconst {
continue
}
- d := v_1.AuxInt
+ d := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int32(c * d))
+ v.AuxInt = int32ToAuxInt(c * d)
return true
}
break
return true
}
// match: (MULA (MOVWconst [c]) (MOVWconst [d]) a)
- // result: (ADDconst [int64(int32(c*d))] a)
+ // result: (ADDconst [c*d] a)
for {
if v_0.Op != OpARMMOVWconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt32(v_0.AuxInt)
if v_1.Op != OpARMMOVWconst {
break
}
- d := v_1.AuxInt
+ d := auxIntToInt32(v_1.AuxInt)
a := v_2
v.reset(OpARMADDconst)
- v.AuxInt = int64(int32(c * d))
+ v.AuxInt = int32ToAuxInt(c * d)
v.AddArg(a)
return true
}
return true
}
// match: (MULS (MOVWconst [c]) (MOVWconst [d]) a)
- // result: (SUBconst [int64(int32(c*d))] a)
+ // result: (SUBconst [c*d] a)
for {
if v_0.Op != OpARMMOVWconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt32(v_0.AuxInt)
if v_1.Op != OpARMMOVWconst {
break
}
- d := v_1.AuxInt
+ d := auxIntToInt32(v_1.AuxInt)
a := v_2
v.reset(OpARMSUBconst)
- v.AuxInt = int64(int32(c * d))
+ v.AuxInt = int32ToAuxInt(c * d)
v.AddArg(a)
return true
}
func rewriteValueARM_OpARMMVNshiftLL(v *Value) bool {
v_0 := v.Args[0]
// match: (MVNshiftLL (MOVWconst [c]) [d])
- // result: (MOVWconst [^int64(uint32(c)<<uint64(d))])
+ // result: (MOVWconst [^(c<<uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMMOVWconst {
break
}
- c := v_0.AuxInt
+ c := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = ^int64(uint32(c) << uint64(d))
+ v.AuxInt = int32ToAuxInt(^(c << uint64(d)))
return true
}
return false
return true
}
// match: (ORshiftLL x (MOVWconst [c]) [d])
- // result: (ORconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (ORconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMORconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (ORshiftRA x (MOVWconst [c]) [d])
- // result: (ORconst x [int64(int32(c)>>uint64(d))])
+ // result: (ORconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMORconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (ORshiftRL x (MOVWconst [c]) [d])
- // result: (ORconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (ORconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMORconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
return true
}
// match: (RSBSshiftLL x (MOVWconst [c]) [d])
- // result: (RSBSconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (RSBSconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMRSBSconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (RSBSshiftRA x (MOVWconst [c]) [d])
- // result: (RSBSconst x [int64(int32(c)>>uint64(d))])
+ // result: (RSBSconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMRSBSconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (RSBSshiftRL x (MOVWconst [c]) [d])
- // result: (RSBSconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (RSBSconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMRSBSconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
func rewriteValueARM_OpARMRSBconst(v *Value) bool {
v_0 := v.Args[0]
// match: (RSBconst [c] (MOVWconst [d]))
- // result: (MOVWconst [int64(int32(c-d))])
+ // result: (MOVWconst [c-d])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMMOVWconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int32(c - d))
+ v.AuxInt = int32ToAuxInt(c - d)
return true
}
// match: (RSBconst [c] (RSBconst [d] x))
- // result: (ADDconst [int64(int32(c-d))] x)
+ // result: (ADDconst [c-d] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMRSBconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARMADDconst)
- v.AuxInt = int64(int32(c - d))
+ v.AuxInt = int32ToAuxInt(c - d)
v.AddArg(x)
return true
}
// match: (RSBconst [c] (ADDconst [d] x))
- // result: (RSBconst [int64(int32(c-d))] x)
+ // result: (RSBconst [c-d] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMADDconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARMRSBconst)
- v.AuxInt = int64(int32(c - d))
+ v.AuxInt = int32ToAuxInt(c - d)
v.AddArg(x)
return true
}
// match: (RSBconst [c] (SUBconst [d] x))
- // result: (RSBconst [int64(int32(c+d))] x)
+ // result: (RSBconst [c+d] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMSUBconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARMRSBconst)
- v.AuxInt = int64(int32(c + d))
+ v.AuxInt = int32ToAuxInt(c + d)
v.AddArg(x)
return true
}
return true
}
// match: (RSBshiftLL x (MOVWconst [c]) [d])
- // result: (RSBconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (RSBconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMRSBconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (RSBshiftRA x (MOVWconst [c]) [d])
- // result: (RSBconst x [int64(int32(c)>>uint64(d))])
+ // result: (RSBconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMRSBconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (RSBshiftRL x (MOVWconst [c]) [d])
- // result: (RSBconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (RSBconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMRSBconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (RSCconst [c] (ADDconst [d] x) flags)
- // result: (RSCconst [int64(int32(c-d))] x flags)
+ // result: (RSCconst [c-d] x flags)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMADDconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
flags := v_1
v.reset(OpARMRSCconst)
- v.AuxInt = int64(int32(c - d))
+ v.AuxInt = int32ToAuxInt(c - d)
v.AddArg2(x, flags)
return true
}
// match: (RSCconst [c] (SUBconst [d] x) flags)
- // result: (RSCconst [int64(int32(c+d))] x flags)
+ // result: (RSCconst [c+d] x flags)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMSUBconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
flags := v_1
v.reset(OpARMRSCconst)
- v.AuxInt = int64(int32(c + d))
+ v.AuxInt = int32ToAuxInt(c + d)
v.AddArg2(x, flags)
return true
}
return true
}
// match: (RSCshiftLL x (MOVWconst [c]) [d] flags)
- // result: (RSCconst x [int64(int32(uint32(c)<<uint64(d)))] flags)
+ // result: (RSCconst x [c<<uint64(d)] flags)
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
flags := v_2
v.reset(OpARMRSCconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg2(x, flags)
return true
}
return true
}
// match: (RSCshiftRA x (MOVWconst [c]) [d] flags)
- // result: (RSCconst x [int64(int32(c)>>uint64(d))] flags)
+ // result: (RSCconst x [c>>uint64(d)] flags)
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
flags := v_2
v.reset(OpARMRSCconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg2(x, flags)
return true
}
return true
}
// match: (RSCshiftRL x (MOVWconst [c]) [d] flags)
- // result: (RSCconst x [int64(int32(uint32(c)>>uint64(d)))] flags)
+ // result: (RSCconst x [int32(uint32(c)>>uint64(d))] flags)
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
flags := v_2
v.reset(OpARMRSCconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg2(x, flags)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (SBCconst [c] (ADDconst [d] x) flags)
- // result: (SBCconst [int64(int32(c-d))] x flags)
+ // result: (SBCconst [c-d] x flags)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMADDconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
flags := v_1
v.reset(OpARMSBCconst)
- v.AuxInt = int64(int32(c - d))
+ v.AuxInt = int32ToAuxInt(c - d)
v.AddArg2(x, flags)
return true
}
// match: (SBCconst [c] (SUBconst [d] x) flags)
- // result: (SBCconst [int64(int32(c+d))] x flags)
+ // result: (SBCconst [c+d] x flags)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMSUBconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
flags := v_1
v.reset(OpARMSBCconst)
- v.AuxInt = int64(int32(c + d))
+ v.AuxInt = int32ToAuxInt(c + d)
v.AddArg2(x, flags)
return true
}
return true
}
// match: (SBCshiftLL x (MOVWconst [c]) [d] flags)
- // result: (SBCconst x [int64(int32(uint32(c)<<uint64(d)))] flags)
+ // result: (SBCconst x [c<<uint64(d)] flags)
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
flags := v_2
v.reset(OpARMSBCconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg2(x, flags)
return true
}
return true
}
// match: (SBCshiftRA x (MOVWconst [c]) [d] flags)
- // result: (SBCconst x [int64(int32(c)>>uint64(d))] flags)
+ // result: (SBCconst x [c>>uint64(d)] flags)
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
flags := v_2
v.reset(OpARMSBCconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg2(x, flags)
return true
}
return true
}
// match: (SBCshiftRL x (MOVWconst [c]) [d] flags)
- // result: (SBCconst x [int64(int32(uint32(c)>>uint64(d)))] flags)
+ // result: (SBCconst x [int32(uint32(c)>>uint64(d))] flags)
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
flags := v_2
v.reset(OpARMSBCconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg2(x, flags)
return true
}
func rewriteValueARM_OpARMSLLconst(v *Value) bool {
v_0 := v.Args[0]
// match: (SLLconst [c] (MOVWconst [d]))
- // result: (MOVWconst [int64(int32(uint32(d)<<uint64(c)))])
+ // result: (MOVWconst [d<<uint64(c)])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMMOVWconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int32(uint32(d) << uint64(c)))
+ v.AuxInt = int32ToAuxInt(d << uint64(c))
return true
}
return false
func rewriteValueARM_OpARMSRAconst(v *Value) bool {
v_0 := v.Args[0]
// match: (SRAconst [c] (MOVWconst [d]))
- // result: (MOVWconst [int64(int32(d)>>uint64(c))])
+ // result: (MOVWconst [d>>uint64(c)])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMMOVWconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int32(d) >> uint64(c))
+ v.AuxInt = int32ToAuxInt(d >> uint64(c))
return true
}
// match: (SRAconst (SLLconst x [c]) [d])
func rewriteValueARM_OpARMSRLconst(v *Value) bool {
v_0 := v.Args[0]
// match: (SRLconst [c] (MOVWconst [d]))
- // result: (MOVWconst [int64(int32(uint32(d)>>uint64(c)))])
+ // result: (MOVWconst [int32(uint32(d)>>uint64(c))])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMMOVWconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int32(uint32(d) >> uint64(c)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(d) >> uint64(c)))
return true
}
// match: (SRLconst (SLLconst x [c]) [d])
return true
}
// match: (SUBSshiftLL x (MOVWconst [c]) [d])
- // result: (SUBSconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (SUBSconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMSUBSconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (SUBSshiftRA x (MOVWconst [c]) [d])
- // result: (SUBSconst x [int64(int32(c)>>uint64(d))])
+ // result: (SUBSconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMSUBSconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (SUBSshiftRL x (MOVWconst [c]) [d])
- // result: (SUBSconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (SUBSconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMSUBSconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
}
// match: (SUBconst [c] x)
// cond: !isARMImmRot(uint32(c)) && isARMImmRot(uint32(-c))
- // result: (ADDconst [int64(int32(-c))] x)
+ // result: (ADDconst [-c] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(!isARMImmRot(uint32(c)) && isARMImmRot(uint32(-c))) {
break
}
v.reset(OpARMADDconst)
- v.AuxInt = int64(int32(-c))
+ v.AuxInt = int32ToAuxInt(-c)
v.AddArg(x)
return true
}
// match: (SUBconst [c] x)
// cond: objabi.GOARM==7 && !isARMImmRot(uint32(c)) && uint32(c)>0xffff && uint32(-c)<=0xffff
- // result: (ANDconst [int64(int32(-c))] x)
+ // result: (ADDconst [-c] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
x := v_0
if !(objabi.GOARM == 7 && !isARMImmRot(uint32(c)) && uint32(c) > 0xffff && uint32(-c) <= 0xffff) {
break
}
- v.reset(OpARMANDconst)
- v.AuxInt = int64(int32(-c))
+ v.reset(OpARMADDconst)
+ v.AuxInt = int32ToAuxInt(-c)
v.AddArg(x)
return true
}
// match: (SUBconst [c] (MOVWconst [d]))
- // result: (MOVWconst [int64(int32(d-c))])
+ // result: (MOVWconst [d-c])
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMMOVWconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int32(d - c))
+ v.AuxInt = int32ToAuxInt(d - c)
return true
}
// match: (SUBconst [c] (SUBconst [d] x))
- // result: (ADDconst [int64(int32(-c-d))] x)
+ // result: (ADDconst [-c-d] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMSUBconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARMADDconst)
- v.AuxInt = int64(int32(-c - d))
+ v.AuxInt = int32ToAuxInt(-c - d)
v.AddArg(x)
return true
}
// match: (SUBconst [c] (ADDconst [d] x))
- // result: (ADDconst [int64(int32(-c+d))] x)
+ // result: (ADDconst [-c+d] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMADDconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARMADDconst)
- v.AuxInt = int64(int32(-c + d))
+ v.AuxInt = int32ToAuxInt(-c + d)
v.AddArg(x)
return true
}
// match: (SUBconst [c] (RSBconst [d] x))
- // result: (RSBconst [int64(int32(-c+d))] x)
+ // result: (RSBconst [-c+d] x)
for {
- c := v.AuxInt
+ c := auxIntToInt32(v.AuxInt)
if v_0.Op != OpARMRSBconst {
break
}
- d := v_0.AuxInt
+ d := auxIntToInt32(v_0.AuxInt)
x := v_0.Args[0]
v.reset(OpARMRSBconst)
- v.AuxInt = int64(int32(-c + d))
+ v.AuxInt = int32ToAuxInt(-c + d)
v.AddArg(x)
return true
}
return true
}
// match: (SUBshiftLL x (MOVWconst [c]) [d])
- // result: (SUBconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (SUBconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMSUBconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (SUBshiftRA x (MOVWconst [c]) [d])
- // result: (SUBconst x [int64(int32(c)>>uint64(d))])
+ // result: (SUBconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMSUBconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (SUBshiftRL x (MOVWconst [c]) [d])
- // result: (SUBconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (SUBconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMSUBconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
return true
}
// match: (TEQshiftLL x (MOVWconst [c]) [d])
- // result: (TEQconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (TEQconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMTEQconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (TEQshiftRA x (MOVWconst [c]) [d])
- // result: (TEQconst x [int64(int32(c)>>uint64(d))])
+ // result: (TEQconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMTEQconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (TEQshiftRL x (MOVWconst [c]) [d])
- // result: (TEQconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (TEQconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMTEQconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
return true
}
// match: (TSTshiftLL x (MOVWconst [c]) [d])
- // result: (TSTconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (TSTconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMTSTconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (TSTshiftRA x (MOVWconst [c]) [d])
- // result: (TSTconst x [int64(int32(c)>>uint64(d))])
+ // result: (TSTconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMTSTconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (TSTshiftRL x (MOVWconst [c]) [d])
- // result: (TSTconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (TSTconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMTSTconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
return true
}
// match: (XORshiftLL x (MOVWconst [c]) [d])
- // result: (XORconst x [int64(int32(uint32(c)<<uint64(d)))])
+ // result: (XORconst x [c<<uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMXORconst)
- v.AuxInt = int64(int32(uint32(c) << uint64(d)))
+ v.AuxInt = int32ToAuxInt(c << uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (XORshiftRA x (MOVWconst [c]) [d])
- // result: (XORconst x [int64(int32(c)>>uint64(d))])
+ // result: (XORconst x [c>>uint64(d)])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMXORconst)
- v.AuxInt = int64(int32(c) >> uint64(d))
+ v.AuxInt = int32ToAuxInt(c >> uint64(d))
v.AddArg(x)
return true
}
return true
}
// match: (XORshiftRL x (MOVWconst [c]) [d])
- // result: (XORconst x [int64(int32(uint32(c)>>uint64(d)))])
+ // result: (XORconst x [int32(uint32(c)>>uint64(d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMXORconst)
- v.AuxInt = int64(int32(uint32(c) >> uint64(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) >> uint64(d)))
v.AddArg(x)
return true
}
return true
}
// match: (XORshiftRR x (MOVWconst [c]) [d])
- // result: (XORconst x [int64(int32(uint32(c)>>uint64(d)|uint32(c)<<uint64(32-d)))])
+ // result: (XORconst x [int32(uint32(c)>>uint64(d)|uint32(c)<<uint64(32-d))])
for {
- d := v.AuxInt
+ d := auxIntToInt32(v.AuxInt)
x := v_0
if v_1.Op != OpARMMOVWconst {
break
}
- c := v_1.AuxInt
+ c := auxIntToInt32(v_1.AuxInt)
v.reset(OpARMXORconst)
- v.AuxInt = int64(int32(uint32(c)>>uint64(d) | uint32(c)<<uint64(32-d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c)>>uint64(d) | uint32(c)<<uint64(32-d)))
v.AddArg(x)
return true
}
return true
}
// match: (Select0 (CALLudiv (MOVWconst [c]) (MOVWconst [d])))
- // result: (MOVWconst [int64(int32(uint32(c)/uint32(d)))])
+ // result: (MOVWconst [int32(uint32(c)/uint32(d))])
for {
if v_0.Op != OpARMCALLudiv {
break
if v_0_0.Op != OpARMMOVWconst {
break
}
- c := v_0_0.AuxInt
+ c := auxIntToInt32(v_0_0.AuxInt)
v_0_1 := v_0.Args[1]
if v_0_1.Op != OpARMMOVWconst {
break
}
- d := v_0_1.AuxInt
+ d := auxIntToInt32(v_0_1.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int32(uint32(c) / uint32(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) / uint32(d)))
return true
}
return false
return true
}
// match: (Select1 (CALLudiv (MOVWconst [c]) (MOVWconst [d])))
- // result: (MOVWconst [int64(int32(uint32(c)%uint32(d)))])
+ // result: (MOVWconst [int32(uint32(c)%uint32(d))])
for {
if v_0.Op != OpARMCALLudiv {
break
if v_0_0.Op != OpARMMOVWconst {
break
}
- c := v_0_0.AuxInt
+ c := auxIntToInt32(v_0_0.AuxInt)
v_0_1 := v_0.Args[1]
if v_0_1.Op != OpARMMOVWconst {
break
}
- d := v_0_1.AuxInt
+ d := auxIntToInt32(v_0_1.AuxInt)
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int32(uint32(c) % uint32(d)))
+ v.AuxInt = int32ToAuxInt(int32(uint32(c) % uint32(d)))
return true
}
return false
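As a standalone sanity check (not part of this CL; it only assumes the helper shapes sketched above), the following program compares the old untyped fold with the new typed one on a few sign-extended 32-bit AuxInt values:

	package main

	import "fmt"

	// Assumed shapes of the typed-aux helpers, matching the sketch above.
	func auxIntToInt32(i int64) int32 { return int32(i) }
	func int32ToAuxInt(i int32) int64 { return int64(i) }

	func main() {
		vals := []int64{0, 1, -1, 0x7fffffff, -0x80000000, 0x12345678}
		for _, c := range vals {
			for _, d := range vals {
				oldFold := int64(int32(c + d))                                // old spelling: int64(int32(c+d))
				newFold := int32ToAuxInt(auxIntToInt32(c) + auxIntToInt32(d)) // new spelling: typed c+d
				if oldFold != newFold {
					fmt.Printf("mismatch: c=%d d=%d old=%d new=%d\n", c, d, oldFold, newFold)
					return
				}
			}
		}
		fmt.Println("old and new ADDconst folds agree on all samples")
	}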