(Leq16U x y) => (LessEqualU (CMP (ZeroExt16to32 x) (ZeroExt16to32 y)))
(Leq32U x y) => (LessEqualU (CMP x y))
-(OffPtr [off] ptr:(SP)) -> (MOVWaddr [off] ptr)
-(OffPtr [off] ptr) -> (ADDconst [off] ptr)
+(OffPtr [off] ptr:(SP)) => (MOVWaddr [int32(off)] ptr)
+(OffPtr [off] ptr) => (ADDconst [int32(off)] ptr)
(Addr ...) -> (MOVWaddr ...)
(LocalAddr {sym} base _) => (MOVWaddr {sym} base)
(GE (CMPconst [0] l:(XORshiftRLreg x y z)) yes no) && l.Uses==1 => (GE (TEQshiftRLreg x y z) yes no)
(GE (CMPconst [0] l:(XORshiftRAreg x y z)) yes no) && l.Uses==1 => (GE (TEQshiftRAreg x y z) yes no)
-(MOVBUload [off] {sym} (SB) _) && symIsRO(sym) -> (MOVWconst [int64(read8(sym, off))])
-(MOVHUload [off] {sym} (SB) _) && symIsRO(sym) -> (MOVWconst [int64(read16(sym, off, config.ctxt.Arch.ByteOrder))])
-(MOVWload [off] {sym} (SB) _) && symIsRO(sym) -> (MOVWconst [int64(int32(read32(sym, off, config.ctxt.Arch.ByteOrder)))])
+(MOVBUload [off] {sym} (SB) _) && symIsRO(sym) => (MOVWconst [int32(read8(sym, int64(off)))])
+(MOVHUload [off] {sym} (SB) _) && symIsRO(sym) => (MOVWconst [int32(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+(MOVWload [off] {sym} (SB) _) && symIsRO(sym) => (MOVWconst [int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
}
// match: (MOVBUload [off] {sym} (SB) _)
// cond: symIsRO(sym)
- // result: (MOVWconst [int64(read8(sym, off))])
+ // result: (MOVWconst [int32(read8(sym, int64(off)))])
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpSB || !(symIsRO(sym)) {
break
}
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(read8(sym, off))
+ v.AuxInt = int32ToAuxInt(int32(read8(sym, int64(off))))
return true
}
return false
}
// match: (MOVHUload [off] {sym} (SB) _)
// cond: symIsRO(sym)
- // result: (MOVWconst [int64(read16(sym, off, config.ctxt.Arch.ByteOrder))])
+ // result: (MOVWconst [int32(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpSB || !(symIsRO(sym)) {
break
}
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(read16(sym, off, config.ctxt.Arch.ByteOrder))
+ v.AuxInt = int32ToAuxInt(int32(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
return true
}
return false
}
// match: (MOVWload [off] {sym} (SB) _)
// cond: symIsRO(sym)
- // result: (MOVWconst [int64(int32(read32(sym, off, config.ctxt.Arch.ByteOrder)))])
+ // result: (MOVWconst [int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
for {
- off := v.AuxInt
- sym := v.Aux
+ off := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
if v_0.Op != OpSB || !(symIsRO(sym)) {
break
}
v.reset(OpARMMOVWconst)
- v.AuxInt = int64(int32(read32(sym, off, config.ctxt.Arch.ByteOrder)))
+ v.AuxInt = int32ToAuxInt(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
return true
}
return false
}
func rewriteValueARM_OpOffPtr(v *Value) bool {
v_0 := v.Args[0]
// match: (OffPtr [off] ptr:(SP))
- // result: (MOVWaddr [off] ptr)
+ // result: (MOVWaddr [int32(off)] ptr)
for {
- off := v.AuxInt
+ off := auxIntToInt64(v.AuxInt)
ptr := v_0
if ptr.Op != OpSP {
break
}
v.reset(OpARMMOVWaddr)
- v.AuxInt = off
+ v.AuxInt = int32ToAuxInt(int32(off))
v.AddArg(ptr)
return true
}
// match: (OffPtr [off] ptr)
- // result: (ADDconst [off] ptr)
+ // result: (ADDconst [int32(off)] ptr)
for {
- off := v.AuxInt
+ off := auxIntToInt64(v.AuxInt)
ptr := v_0
v.reset(OpARMADDconst)
- v.AuxInt = off
+ v.AuxInt = int32ToAuxInt(int32(off))
v.AddArg(ptr)
return true
}