p := s.Prog(v.Op.Asm())
p.From.Type = obj.TYPE_MEM
p.From.Reg = v.Args[0].Reg()
- ssagen.AddAux2(&p.From, v, sc.Off())
+ ssagen.AddAux2(&p.From, v, sc.Off64())
p.To.Type = obj.TYPE_CONST
- p.To.Offset = sc.Val()
+ p.To.Offset = sc.Val64()
case ssa.OpAMD64CMPQloadidx8, ssa.OpAMD64CMPQloadidx1, ssa.OpAMD64CMPLloadidx4, ssa.OpAMD64CMPLloadidx1, ssa.OpAMD64CMPWloadidx2, ssa.OpAMD64CMPWloadidx1, ssa.OpAMD64CMPBloadidx1:
p := s.Prog(v.Op.Asm())
memIdx(&p.From, v)
sc := v.AuxValAndOff()
p := s.Prog(v.Op.Asm())
memIdx(&p.From, v)
- ssagen.AddAux2(&p.From, v, sc.Off())
+ ssagen.AddAux2(&p.From, v, sc.Off64())
p.To.Type = obj.TYPE_CONST
- p.To.Offset = sc.Val()
+ p.To.Offset = sc.Val64()
case ssa.OpAMD64MOVLconst, ssa.OpAMD64MOVQconst:
x := v.Reg()
ssagen.AddAux(&p.To, v)
case ssa.OpAMD64ADDQconstmodify, ssa.OpAMD64ADDLconstmodify:
sc := v.AuxValAndOff()
- off := sc.Off()
+ off := sc.Off64()
- val := sc.Val()
+ val := sc.Val64()
if val == 1 || val == -1 {
var asm obj.As
ssa.OpAMD64BTCQconstmodify, ssa.OpAMD64BTCLconstmodify, ssa.OpAMD64BTSQconstmodify, ssa.OpAMD64BTSLconstmodify,
ssa.OpAMD64BTRQconstmodify, ssa.OpAMD64BTRLconstmodify, ssa.OpAMD64XORQconstmodify, ssa.OpAMD64XORLconstmodify:
sc := v.AuxValAndOff()
- off := sc.Off()
- val := sc.Val()
+ off := sc.Off64()
+ val := sc.Val64()
p := s.Prog(v.Op.Asm())
p.From.Type = obj.TYPE_CONST
p.From.Offset = val
p := s.Prog(v.Op.Asm())
p.From.Type = obj.TYPE_CONST
sc := v.AuxValAndOff()
- p.From.Offset = sc.Val()
+ p.From.Offset = sc.Val64()
p.To.Type = obj.TYPE_MEM
p.To.Reg = v.Args[0].Reg()
- ssagen.AddAux2(&p.To, v, sc.Off())
+ ssagen.AddAux2(&p.To, v, sc.Off64())
case ssa.OpAMD64MOVOstorezero:
if s.ABI != obj.ABIInternal {
v.Fatalf("MOVOstorezero can be only used in ABIInternal functions")
p := s.Prog(v.Op.Asm())
p.From.Type = obj.TYPE_CONST
sc := v.AuxValAndOff()
- p.From.Offset = sc.Val()
+ p.From.Offset = sc.Val64()
switch {
case p.As == x86.AADDQ && p.From.Offset == 1:
p.As = x86.AINCQ
p.From.Type = obj.TYPE_NONE
}
memIdx(&p.To, v)
- ssagen.AddAux2(&p.To, v, sc.Off())
+ ssagen.AddAux2(&p.To, v, sc.Off64())
case ssa.OpAMD64MOVLQSX, ssa.OpAMD64MOVWQSX, ssa.OpAMD64MOVBQSX, ssa.OpAMD64MOVLQZX, ssa.OpAMD64MOVWQZX, ssa.OpAMD64MOVBQZX,
ssa.OpAMD64CVTTSS2SL, ssa.OpAMD64CVTTSD2SL, ssa.OpAMD64CVTTSS2SQ, ssa.OpAMD64CVTTSD2SQ,
ssa.OpAMD64CVTSS2SD, ssa.OpAMD64CVTSD2SS:
p := s.Prog(v.Op.Asm())
p.From.Type = obj.TYPE_CONST
sc := v.AuxValAndOff()
- p.From.Offset = sc.Val()
+ p.From.Offset = sc.Val64()
p.To.Type = obj.TYPE_MEM
p.To.Reg = v.Args[0].Reg()
- ssagen.AddAux2(&p.To, v, sc.Off())
+ ssagen.AddAux2(&p.To, v, sc.Off64())
case ssa.OpS390XMOVBreg, ssa.OpS390XMOVHreg, ssa.OpS390XMOVWreg,
ssa.OpS390XMOVBZreg, ssa.OpS390XMOVHZreg, ssa.OpS390XMOVWZreg,
ssa.OpS390XLDGR, ssa.OpS390XLGDR,
p := s.Prog(v.Op.Asm())
p.From.Type = obj.TYPE_CONST
sc := v.AuxValAndOff()
- p.From.Offset = sc.Val()
+ p.From.Offset = sc.Val64()
p.To.Type = obj.TYPE_MEM
p.To.Reg = v.Args[0].Reg()
- ssagen.AddAux2(&p.To, v, sc.Off())
+ ssagen.AddAux2(&p.To, v, sc.Off64())
case ssa.OpCopy:
if v.Type.IsMemory() {
return
vo := v.AuxValAndOff()
p := s.Prog(s390x.AMVC)
p.From.Type = obj.TYPE_CONST
- p.From.Offset = vo.Val()
+ p.From.Offset = vo.Val64()
p.SetFrom3(obj.Addr{
Type: obj.TYPE_MEM,
Reg: v.Args[1].Reg(),
- Offset: vo.Off(),
+ Offset: vo.Off64(),
})
p.To.Type = obj.TYPE_MEM
p.To.Reg = v.Args[0].Reg()
- p.To.Offset = vo.Off()
+ p.To.Offset = vo.Off64()
case ssa.OpS390XSTMG2, ssa.OpS390XSTMG3, ssa.OpS390XSTMG4,
ssa.OpS390XSTM2, ssa.OpS390XSTM3, ssa.OpS390XSTM4:
for i := 2; i < len(v.Args)-1; i++ {
(Zero [4] destptr mem) => (MOVLstoreconst [0] destptr mem)
(Zero [3] destptr mem) =>
- (MOVBstoreconst [makeValAndOff32(0,2)] destptr
- (MOVWstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVBstoreconst [makeValAndOff(0,2)] destptr
+ (MOVWstoreconst [makeValAndOff(0,0)] destptr mem))
(Zero [5] destptr mem) =>
- (MOVBstoreconst [makeValAndOff32(0,4)] destptr
- (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVBstoreconst [makeValAndOff(0,4)] destptr
+ (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
(Zero [6] destptr mem) =>
- (MOVWstoreconst [makeValAndOff32(0,4)] destptr
- (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVWstoreconst [makeValAndOff(0,4)] destptr
+ (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
(Zero [7] destptr mem) =>
- (MOVLstoreconst [makeValAndOff32(0,3)] destptr
- (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVLstoreconst [makeValAndOff(0,3)] destptr
+ (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
// Strip off any fractional word zeroing.
(Zero [s] destptr mem) && s%4 != 0 && s > 4 =>
// Zero small numbers of words directly.
(Zero [8] destptr mem) =>
- (MOVLstoreconst [makeValAndOff32(0,4)] destptr
- (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVLstoreconst [makeValAndOff(0,4)] destptr
+ (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
(Zero [12] destptr mem) =>
- (MOVLstoreconst [makeValAndOff32(0,8)] destptr
- (MOVLstoreconst [makeValAndOff32(0,4)] destptr
- (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem)))
+ (MOVLstoreconst [makeValAndOff(0,8)] destptr
+ (MOVLstoreconst [makeValAndOff(0,4)] destptr
+ (MOVLstoreconst [makeValAndOff(0,0)] destptr mem)))
(Zero [16] destptr mem) =>
- (MOVLstoreconst [makeValAndOff32(0,12)] destptr
- (MOVLstoreconst [makeValAndOff32(0,8)] destptr
- (MOVLstoreconst [makeValAndOff32(0,4)] destptr
- (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))))
+ (MOVLstoreconst [makeValAndOff(0,12)] destptr
+ (MOVLstoreconst [makeValAndOff(0,8)] destptr
+ (MOVLstoreconst [makeValAndOff(0,4)] destptr
+ (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))))
// Medium zeroing uses a duff device.
(Zero [s] destptr mem)
((ADD|AND|OR|XOR)Lconstmodify [valoff1.addOffset32(off2)] {sym} base mem)
// Fold constants into stores.
-(MOVLstore [off] {sym} ptr (MOVLconst [c]) mem) && validOff(int64(off)) =>
- (MOVLstoreconst [makeValAndOff32(c,off)] {sym} ptr mem)
-(MOVWstore [off] {sym} ptr (MOVLconst [c]) mem) && validOff(int64(off)) =>
- (MOVWstoreconst [makeValAndOff32(c,off)] {sym} ptr mem)
-(MOVBstore [off] {sym} ptr (MOVLconst [c]) mem) && validOff(int64(off)) =>
- (MOVBstoreconst [makeValAndOff32(c,off)] {sym} ptr mem)
+(MOVLstore [off] {sym} ptr (MOVLconst [c]) mem) =>
+ (MOVLstoreconst [makeValAndOff(c,off)] {sym} ptr mem)
+(MOVWstore [off] {sym} ptr (MOVLconst [c]) mem) =>
+ (MOVWstoreconst [makeValAndOff(c,off)] {sym} ptr mem)
+(MOVBstore [off] {sym} ptr (MOVLconst [c]) mem) =>
+ (MOVBstoreconst [makeValAndOff(c,off)] {sym} ptr mem)
// Fold address offsets into constant stores.
(MOV(L|W|B)storeconst [sc] {s} (ADDLconst [off] ptr) mem) && sc.canAdd32(off) =>
(MOVLstore {sym} [off] ptr y:((ADD|SUB|AND|OR|XOR)L l:(MOVLload [off] {sym} ptr mem) x) mem) && y.Uses==1 && l.Uses==1 && clobber(y, l) =>
((ADD|SUB|AND|OR|XOR)Lmodify [off] {sym} ptr x mem)
(MOVLstore {sym} [off] ptr y:((ADD|AND|OR|XOR)Lconst [c] l:(MOVLload [off] {sym} ptr mem)) mem)
- && y.Uses==1 && l.Uses==1 && clobber(y, l) && validValAndOff(int64(c),int64(off)) =>
- ((ADD|AND|OR|XOR)Lconstmodify [makeValAndOff32(c,off)] {sym} ptr mem)
+ && y.Uses==1 && l.Uses==1 && clobber(y, l) =>
+ ((ADD|AND|OR|XOR)Lconstmodify [makeValAndOff(c,off)] {sym} ptr mem)
// fold LEALs together
(LEAL [off1] {sym1} (LEAL [off2] {sym2} x)) && is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2) =>
&& x.Uses == 1
&& a.Off() + 1 == c.Off()
&& clobber(x)
- => (MOVWstoreconst [makeValAndOff32(int32(a.Val()&0xff | c.Val()<<8), a.Off32())] {s} p mem)
+ => (MOVWstoreconst [makeValAndOff(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p mem)
(MOVBstoreconst [a] {s} p x:(MOVBstoreconst [c] {s} p mem))
&& x.Uses == 1
&& a.Off() + 1 == c.Off()
&& clobber(x)
- => (MOVWstoreconst [makeValAndOff32(int32(a.Val()&0xff | c.Val()<<8), a.Off32())] {s} p mem)
+ => (MOVWstoreconst [makeValAndOff(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p mem)
(MOVBstoreconst [c] {s} p1 x:(MOVBstoreconst [a] {s} p0 mem))
&& x.Uses == 1
&& a.Off() == c.Off()
&& sequentialAddresses(p0, p1, 1)
&& clobber(x)
- => (MOVWstoreconst [makeValAndOff32(int32(a.Val()&0xff | c.Val()<<8), a.Off32())] {s} p0 mem)
+ => (MOVWstoreconst [makeValAndOff(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p0 mem)
(MOVBstoreconst [a] {s} p0 x:(MOVBstoreconst [c] {s} p1 mem))
&& x.Uses == 1
&& a.Off() == c.Off()
&& sequentialAddresses(p0, p1, 1)
&& clobber(x)
- => (MOVWstoreconst [makeValAndOff32(int32(a.Val()&0xff | c.Val()<<8), a.Off32())] {s} p0 mem)
+ => (MOVWstoreconst [makeValAndOff(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p0 mem)
(MOVWstoreconst [c] {s} p x:(MOVWstoreconst [a] {s} p mem))
&& x.Uses == 1
&& a.Off() + 2 == c.Off()
&& clobber(x)
- => (MOVLstoreconst [makeValAndOff32(int32(a.Val()&0xffff | c.Val()<<16), a.Off32())] {s} p mem)
+ => (MOVLstoreconst [makeValAndOff(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p mem)
(MOVWstoreconst [a] {s} p x:(MOVWstoreconst [c] {s} p mem))
&& x.Uses == 1
- && ValAndOff(a).Off() + 2 == ValAndOff(c).Off()
+ && a.Off() + 2 == c.Off()
&& clobber(x)
- => (MOVLstoreconst [makeValAndOff32(int32(a.Val()&0xffff | c.Val()<<16), a.Off32())] {s} p mem)
+ => (MOVLstoreconst [makeValAndOff(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p mem)
(MOVWstoreconst [c] {s} p1 x:(MOVWstoreconst [a] {s} p0 mem))
&& x.Uses == 1
&& a.Off() == c.Off()
&& sequentialAddresses(p0, p1, 2)
&& clobber(x)
- => (MOVLstoreconst [makeValAndOff32(int32(a.Val()&0xffff | c.Val()<<16), a.Off32())] {s} p0 mem)
+ => (MOVLstoreconst [makeValAndOff(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p0 mem)
(MOVWstoreconst [a] {s} p0 x:(MOVWstoreconst [c] {s} p1 mem))
&& x.Uses == 1
&& a.Off() == c.Off()
&& sequentialAddresses(p0, p1, 2)
&& clobber(x)
- => (MOVLstoreconst [makeValAndOff32(int32(a.Val()&0xffff | c.Val()<<16), a.Off32())] {s} p0 mem)
+ => (MOVLstoreconst [makeValAndOff(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p0 mem)
// Combine stores into larger (unaligned) stores.
(MOVBstore [i] {s} p (SHR(W|L)const [8] w) x:(MOVBstore [i-1] {s} p w mem))
(CMP(L|W|B)const l:(MOV(L|W|B)load {sym} [off] ptr mem) [c])
&& l.Uses == 1
- && validValAndOff(int64(c), int64(off))
&& clobber(l) =>
- @l.Block (CMP(L|W|B)constload {sym} [makeValAndOff32(int32(c),int32(off))] ptr mem)
+ @l.Block (CMP(L|W|B)constload {sym} [makeValAndOff(int32(c),off)] ptr mem)
-(CMPLload {sym} [off] ptr (MOVLconst [c]) mem) && validValAndOff(int64(c),int64(off)) => (CMPLconstload {sym} [makeValAndOff32(c,off)] ptr mem)
-(CMPWload {sym} [off] ptr (MOVLconst [c]) mem) && validValAndOff(int64(int16(c)),int64(off)) => (CMPWconstload {sym} [makeValAndOff32(int32(int16(c)),off)] ptr mem)
-(CMPBload {sym} [off] ptr (MOVLconst [c]) mem) && validValAndOff(int64(int8(c)),int64(off)) => (CMPBconstload {sym} [makeValAndOff32(int32(int8(c)),off)] ptr mem)
+(CMPLload {sym} [off] ptr (MOVLconst [c]) mem) => (CMPLconstload {sym} [makeValAndOff(c,off)] ptr mem)
+(CMPWload {sym} [off] ptr (MOVLconst [c]) mem) => (CMPWconstload {sym} [makeValAndOff(int32(int16(c)),off)] ptr mem)
+(CMPBload {sym} [off] ptr (MOVLconst [c]) mem) => (CMPBconstload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(MOVBload [off] {sym} (SB) _) && symIsRO(sym) => (MOVLconst [int32(read8(sym, int64(off)))])
(MOVWload [off] {sym} (SB) _) && symIsRO(sym) => (MOVLconst [int32(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
(CMP(L|W|B)load {sym} [off] ptr x mem) => (CMP(L|W|B) (MOV(L|W|B)load {sym} [off] ptr mem) x)
-(CMPLconstload {sym} [vo] ptr mem) => (CMPLconst (MOVLload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
-(CMPWconstload {sym} [vo] ptr mem) => (CMPWconst (MOVWload {sym} [vo.Off32()] ptr mem) [vo.Val16()])
-(CMPBconstload {sym} [vo] ptr mem) => (CMPBconst (MOVBload {sym} [vo.Off32()] ptr mem) [vo.Val8()])
+(CMPLconstload {sym} [vo] ptr mem) => (CMPLconst (MOVLload {sym} [vo.Off()] ptr mem) [vo.Val()])
+(CMPWconstload {sym} [vo] ptr mem) => (CMPWconst (MOVWload {sym} [vo.Off()] ptr mem) [vo.Val16()])
+(CMPBconstload {sym} [vo] ptr mem) => (CMPBconst (MOVBload {sym} [vo.Off()] ptr mem) [vo.Val8()])
// Lowering Zero instructions
(Zero [0] _ mem) => mem
-(Zero [1] destptr mem) => (MOVBstoreconst [makeValAndOff32(0,0)] destptr mem)
-(Zero [2] destptr mem) => (MOVWstoreconst [makeValAndOff32(0,0)] destptr mem)
-(Zero [4] destptr mem) => (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem)
-(Zero [8] destptr mem) => (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem)
+(Zero [1] destptr mem) => (MOVBstoreconst [makeValAndOff(0,0)] destptr mem)
+(Zero [2] destptr mem) => (MOVWstoreconst [makeValAndOff(0,0)] destptr mem)
+(Zero [4] destptr mem) => (MOVLstoreconst [makeValAndOff(0,0)] destptr mem)
+(Zero [8] destptr mem) => (MOVQstoreconst [makeValAndOff(0,0)] destptr mem)
(Zero [3] destptr mem) =>
- (MOVBstoreconst [makeValAndOff32(0,2)] destptr
- (MOVWstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVBstoreconst [makeValAndOff(0,2)] destptr
+ (MOVWstoreconst [makeValAndOff(0,0)] destptr mem))
(Zero [5] destptr mem) =>
- (MOVBstoreconst [makeValAndOff32(0,4)] destptr
- (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVBstoreconst [makeValAndOff(0,4)] destptr
+ (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
(Zero [6] destptr mem) =>
- (MOVWstoreconst [makeValAndOff32(0,4)] destptr
- (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVWstoreconst [makeValAndOff(0,4)] destptr
+ (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
(Zero [7] destptr mem) =>
- (MOVLstoreconst [makeValAndOff32(0,3)] destptr
- (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVLstoreconst [makeValAndOff(0,3)] destptr
+ (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
// Strip off any fractional word zeroing.
(Zero [s] destptr mem) && s%8 != 0 && s > 8 && !config.useSSE =>
(Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8])
- (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
// Zero small numbers of words directly.
(Zero [16] destptr mem) && !config.useSSE =>
- (MOVQstoreconst [makeValAndOff32(0,8)] destptr
- (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVQstoreconst [makeValAndOff(0,8)] destptr
+ (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
(Zero [24] destptr mem) && !config.useSSE =>
- (MOVQstoreconst [makeValAndOff32(0,16)] destptr
- (MOVQstoreconst [makeValAndOff32(0,8)] destptr
- (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem)))
+ (MOVQstoreconst [makeValAndOff(0,16)] destptr
+ (MOVQstoreconst [makeValAndOff(0,8)] destptr
+ (MOVQstoreconst [makeValAndOff(0,0)] destptr mem)))
(Zero [32] destptr mem) && !config.useSSE =>
- (MOVQstoreconst [makeValAndOff32(0,24)] destptr
- (MOVQstoreconst [makeValAndOff32(0,16)] destptr
- (MOVQstoreconst [makeValAndOff32(0,8)] destptr
- (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem))))
+ (MOVQstoreconst [makeValAndOff(0,24)] destptr
+ (MOVQstoreconst [makeValAndOff(0,16)] destptr
+ (MOVQstoreconst [makeValAndOff(0,8)] destptr
+ (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))))
(Zero [s] destptr mem) && s > 8 && s < 16 && config.useSSE =>
- (MOVQstoreconst [makeValAndOff32(0,int32(s-8))] destptr
- (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVQstoreconst [makeValAndOff(0,int32(s-8))] destptr
+ (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
// Adjust zeros to be a multiple of 16 bytes.
(Zero [s] destptr mem) && s%16 != 0 && s > 16 && s%16 > 8 && config.useSSE =>
(Zero [s] destptr mem) && s%16 != 0 && s > 16 && s%16 <= 8 && config.useSSE =>
(Zero [s-s%16] (OffPtr <destptr.Type> destptr [s%16])
- (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem))
+ (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
(Zero [16] destptr mem) && config.useSSE =>
(MOVOstorezero destptr mem)
// Fold constants into stores.
(MOVQstore [off] {sym} ptr (MOVQconst [c]) mem) && validVal(c) =>
- (MOVQstoreconst [makeValAndOff32(int32(c),off)] {sym} ptr mem)
+ (MOVQstoreconst [makeValAndOff(int32(c),off)] {sym} ptr mem)
(MOVLstore [off] {sym} ptr (MOV(L|Q)const [c]) mem) =>
- (MOVLstoreconst [makeValAndOff32(int32(c),off)] {sym} ptr mem)
+ (MOVLstoreconst [makeValAndOff(int32(c),off)] {sym} ptr mem)
(MOVWstore [off] {sym} ptr (MOV(L|Q)const [c]) mem) =>
- (MOVWstoreconst [makeValAndOff32(int32(int16(c)),off)] {sym} ptr mem)
+ (MOVWstoreconst [makeValAndOff(int32(int16(c)),off)] {sym} ptr mem)
(MOVBstore [off] {sym} ptr (MOV(L|Q)const [c]) mem) =>
- (MOVBstoreconst [makeValAndOff32(int32(int8(c)),off)] {sym} ptr mem)
+ (MOVBstoreconst [makeValAndOff(int32(int8(c)),off)] {sym} ptr mem)
// Fold address offsets into constant stores.
(MOV(Q|L|W|B)storeconst [sc] {s} (ADDQconst [off] ptr) mem) && ValAndOff(sc).canAdd32(off) =>
&& x.Uses == 1
&& a.Off() + 1 == c.Off()
&& clobber(x)
- => (MOVWstoreconst [makeValAndOff64(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p mem)
+ => (MOVWstoreconst [makeValAndOff(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p mem)
(MOVBstoreconst [a] {s} p x:(MOVBstoreconst [c] {s} p mem))
&& x.Uses == 1
&& a.Off() + 1 == c.Off()
&& clobber(x)
- => (MOVWstoreconst [makeValAndOff64(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p mem)
+ => (MOVWstoreconst [makeValAndOff(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p mem)
(MOVWstoreconst [c] {s} p x:(MOVWstoreconst [a] {s} p mem))
&& x.Uses == 1
&& a.Off() + 2 == c.Off()
&& clobber(x)
- => (MOVLstoreconst [makeValAndOff64(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p mem)
+ => (MOVLstoreconst [makeValAndOff(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p mem)
(MOVWstoreconst [a] {s} p x:(MOVWstoreconst [c] {s} p mem))
&& x.Uses == 1
&& a.Off() + 2 == c.Off()
&& clobber(x)
- => (MOVLstoreconst [makeValAndOff64(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p mem)
+ => (MOVLstoreconst [makeValAndOff(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p mem)
(MOVLstoreconst [c] {s} p x:(MOVLstoreconst [a] {s} p mem))
&& x.Uses == 1
&& a.Off() + 4 == c.Off()
&& clobber(x)
- => (MOVQstore [a.Off32()] {s} p (MOVQconst [a.Val()&0xffffffff | c.Val()<<32]) mem)
+ => (MOVQstore [a.Off()] {s} p (MOVQconst [a.Val64()&0xffffffff | c.Val64()<<32]) mem)
(MOVLstoreconst [a] {s} p x:(MOVLstoreconst [c] {s} p mem))
&& x.Uses == 1
&& a.Off() + 4 == c.Off()
&& clobber(x)
- => (MOVQstore [a.Off32()] {s} p (MOVQconst [a.Val()&0xffffffff | c.Val()<<32]) mem)
+ => (MOVQstore [a.Off()] {s} p (MOVQconst [a.Val64()&0xffffffff | c.Val64()<<32]) mem)
(MOVQstoreconst [c] {s} p x:(MOVQstoreconst [c2] {s} p mem))
&& config.useSSE
&& x.Uses == 1
&& c.Val() == 0
&& c2.Val() == 0
&& clobber(x)
- => (MOVOstorezero [c2.Off32()] {s} p mem)
+ => (MOVOstorezero [c2.Off()] {s} p mem)
// Combine stores into larger (unaligned) stores. Little endian.
(MOVBstore [i] {s} p (SHR(W|L|Q)const [8] w) x:(MOVBstore [i-1] {s} p w mem))
(MOVBQZX (MOVBQZX x)) => (MOVBQZX x)
(MOVQstore [off] {sym} ptr a:((ADD|AND|OR|XOR|BTC|BTR|BTS)Qconst [c] l:(MOVQload [off] {sym} ptr2 mem)) mem)
- && isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a) =>
- ((ADD|AND|OR|XOR|BTC|BTR|BTS)Qconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ && isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a) =>
+ ((ADD|AND|OR|XOR|BTC|BTR|BTS)Qconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
(MOVLstore [off] {sym} ptr a:((ADD|AND|OR|XOR|BTC|BTR|BTS)Lconst [c] l:(MOVLload [off] {sym} ptr2 mem)) mem)
- && isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a) =>
- ((ADD|AND|OR|XOR|BTC|BTR|BTS)Lconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ && isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a) =>
+ ((ADD|AND|OR|XOR|BTC|BTR|BTS)Lconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
// float <-> int register moves, with no conversion.
// These come up when compiling math.{Float{32,64}bits,Float{32,64}frombits}.
(CMP(Q|L)const l:(MOV(Q|L)load {sym} [off] ptr mem) [c])
&& l.Uses == 1
&& clobber(l) =>
-@l.Block (CMP(Q|L)constload {sym} [makeValAndOff32(c,off)] ptr mem)
+@l.Block (CMP(Q|L)constload {sym} [makeValAndOff(c,off)] ptr mem)
(CMP(W|B)const l:(MOV(W|B)load {sym} [off] ptr mem) [c])
&& l.Uses == 1
&& clobber(l) =>
-@l.Block (CMP(W|B)constload {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+@l.Block (CMP(W|B)constload {sym} [makeValAndOff(int32(c),off)] ptr mem)
-(CMPQload {sym} [off] ptr (MOVQconst [c]) mem) && validValAndOff(c,int64(off)) => (CMPQconstload {sym} [makeValAndOff64(c,int64(off))] ptr mem)
-(CMPLload {sym} [off] ptr (MOVLconst [c]) mem) && validValAndOff(int64(c),int64(off)) => (CMPLconstload {sym} [makeValAndOff32(c,off)] ptr mem)
-(CMPWload {sym} [off] ptr (MOVLconst [c]) mem) && validValAndOff(int64(int16(c)),int64(off)) => (CMPWconstload {sym} [makeValAndOff32(int32(int16(c)),off)] ptr mem)
-(CMPBload {sym} [off] ptr (MOVLconst [c]) mem) && validValAndOff(int64(int8(c)),int64(off)) => (CMPBconstload {sym} [makeValAndOff32(int32(int8(c)),off)] ptr mem)
+(CMPQload {sym} [off] ptr (MOVQconst [c]) mem) && validVal(c) => (CMPQconstload {sym} [makeValAndOff(int32(c),off)] ptr mem)
+(CMPLload {sym} [off] ptr (MOVLconst [c]) mem) => (CMPLconstload {sym} [makeValAndOff(c,off)] ptr mem)
+(CMPWload {sym} [off] ptr (MOVLconst [c]) mem) => (CMPWconstload {sym} [makeValAndOff(int32(int16(c)),off)] ptr mem)
+(CMPBload {sym} [off] ptr (MOVLconst [c]) mem) => (CMPBconstload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
(TEST(Q|L|W|B) l:(MOV(Q|L|W|B)load {sym} [off] ptr mem) l2)
&& l == l2
&& l.Uses == 2
- && validValAndOff(0, int64(off))
&& clobber(l) =>
- @l.Block (CMP(Q|L|W|B)constload {sym} [makeValAndOff64(0, int64(off))] ptr mem)
+ @l.Block (CMP(Q|L|W|B)constload {sym} [makeValAndOff(0, off)] ptr mem)
// Convert ANDload to MOVload when we can do the AND in a containing TEST op.
// Only do when it's within the same block, so we don't have flags live across basic block boundaries.
(CMP(Q|L|W|B)load {sym} [off] ptr x mem) => (CMP(Q|L|W|B) (MOV(Q|L|W|B)load {sym} [off] ptr mem) x)
-(CMP(Q|L|W|B)constload {sym} [vo] ptr mem) && vo.Val() == 0 => (TEST(Q|L|W|B) x:(MOV(Q|L|W|B)load {sym} [vo.Off32()] ptr mem) x)
+(CMP(Q|L|W|B)constload {sym} [vo] ptr mem) && vo.Val() == 0 => (TEST(Q|L|W|B) x:(MOV(Q|L|W|B)load {sym} [vo.Off()] ptr mem) x)
-(CMPQconstload {sym} [vo] ptr mem) && vo.Val() != 0 => (CMPQconst (MOVQload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
-(CMPLconstload {sym} [vo] ptr mem) && vo.Val() != 0 => (CMPLconst (MOVLload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
-(CMPWconstload {sym} [vo] ptr mem) && vo.Val() != 0 => (CMPWconst (MOVWload {sym} [vo.Off32()] ptr mem) [vo.Val16()])
-(CMPBconstload {sym} [vo] ptr mem) && vo.Val() != 0 => (CMPBconst (MOVBload {sym} [vo.Off32()] ptr mem) [vo.Val8()])
+(CMPQconstload {sym} [vo] ptr mem) && vo.Val() != 0 => (CMPQconst (MOVQload {sym} [vo.Off()] ptr mem) [vo.Val()])
+(CMPLconstload {sym} [vo] ptr mem) && vo.Val() != 0 => (CMPLconst (MOVLload {sym} [vo.Off()] ptr mem) [vo.Val()])
+(CMPWconstload {sym} [vo] ptr mem) && vo.Val() != 0 => (CMPWconst (MOVWload {sym} [vo.Off()] ptr mem) [vo.Val16()])
+(CMPBconstload {sym} [vo] ptr mem) && vo.Val() != 0 => (CMPBconst (MOVBload {sym} [vo.Off()] ptr mem) [vo.Val8()])
(CMP(Q|L|W|B)loadidx1 {sym} [off] ptr idx x mem) => (CMP(Q|L|W|B) (MOV(Q|L|W|B)loadidx1 {sym} [off] ptr idx mem) x)
(CMPQloadidx8 {sym} [off] ptr idx x mem) => (CMPQ (MOVQloadidx8 {sym} [off] ptr idx mem) x)
(CMPLloadidx4 {sym} [off] ptr idx x mem) => (CMPL (MOVLloadidx4 {sym} [off] ptr idx mem) x)
(CMPWloadidx2 {sym} [off] ptr idx x mem) => (CMPW (MOVWloadidx2 {sym} [off] ptr idx mem) x)
-(CMP(Q|L|W|B)constloadidx1 {sym} [vo] ptr idx mem) && vo.Val() == 0 => (TEST(Q|L|W|B) x:(MOV(Q|L|W|B)loadidx1 {sym} [vo.Off32()] ptr idx mem) x)
-(CMPQconstloadidx8 {sym} [vo] ptr idx mem) && vo.Val() == 0 => (TESTQ x:(MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) x)
-(CMPLconstloadidx4 {sym} [vo] ptr idx mem) && vo.Val() == 0 => (TESTL x:(MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) x)
-(CMPWconstloadidx2 {sym} [vo] ptr idx mem) && vo.Val() == 0 => (TESTW x:(MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) x)
+(CMP(Q|L|W|B)constloadidx1 {sym} [vo] ptr idx mem) && vo.Val() == 0 => (TEST(Q|L|W|B) x:(MOV(Q|L|W|B)loadidx1 {sym} [vo.Off()] ptr idx mem) x)
+(CMPQconstloadidx8 {sym} [vo] ptr idx mem) && vo.Val() == 0 => (TESTQ x:(MOVQloadidx8 {sym} [vo.Off()] ptr idx mem) x)
+(CMPLconstloadidx4 {sym} [vo] ptr idx mem) && vo.Val() == 0 => (TESTL x:(MOVLloadidx4 {sym} [vo.Off()] ptr idx mem) x)
+(CMPWconstloadidx2 {sym} [vo] ptr idx mem) && vo.Val() == 0 => (TESTW x:(MOVWloadidx2 {sym} [vo.Off()] ptr idx mem) x)
-(CMPQconstloadidx1 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPQconst (MOVQloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
-(CMPLconstloadidx1 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPLconst (MOVLloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
-(CMPWconstloadidx1 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPWconst (MOVWloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
-(CMPBconstloadidx1 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPBconst (MOVBloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val8()])
+(CMPQconstloadidx1 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPQconst (MOVQloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
+(CMPLconstloadidx1 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPLconst (MOVLloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
+(CMPWconstloadidx1 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPWconst (MOVWloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val16()])
+(CMPBconstloadidx1 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPBconst (MOVBloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val8()])
-(CMPQconstloadidx8 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPQconst (MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
-(CMPLconstloadidx4 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPLconst (MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
-(CMPWconstloadidx2 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPWconst (MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
+(CMPQconstloadidx8 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPQconst (MOVQloadidx8 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
+(CMPLconstloadidx4 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPLconst (MOVLloadidx4 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
+(CMPWconstloadidx2 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPWconst (MOVWloadidx2 {sym} [vo.Off()] ptr idx mem) [vo.Val16()])
// MVC for other moves. Use up to 4 instructions (sizes up to 1024 bytes).
(Move [s] dst src mem) && s > 0 && s <= 256 && logLargeCopy(v, s) =>
- (MVC [makeValAndOff32(int32(s), 0)] dst src mem)
+ (MVC [makeValAndOff(int32(s), 0)] dst src mem)
(Move [s] dst src mem) && s > 256 && s <= 512 && logLargeCopy(v, s) =>
- (MVC [makeValAndOff32(int32(s)-256, 256)] dst src (MVC [makeValAndOff32(256, 0)] dst src mem))
+ (MVC [makeValAndOff(int32(s)-256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem))
(Move [s] dst src mem) && s > 512 && s <= 768 && logLargeCopy(v, s) =>
- (MVC [makeValAndOff32(int32(s)-512, 512)] dst src (MVC [makeValAndOff32(256, 256)] dst src (MVC [makeValAndOff32(256, 0)] dst src mem)))
+ (MVC [makeValAndOff(int32(s)-512, 512)] dst src (MVC [makeValAndOff(256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem)))
(Move [s] dst src mem) && s > 768 && s <= 1024 && logLargeCopy(v, s) =>
- (MVC [makeValAndOff32(int32(s)-768, 768)] dst src (MVC [makeValAndOff32(256, 512)] dst src (MVC [makeValAndOff32(256, 256)] dst src (MVC [makeValAndOff32(256, 0)] dst src mem))))
+ (MVC [makeValAndOff(int32(s)-768, 768)] dst src (MVC [makeValAndOff(256, 512)] dst src (MVC [makeValAndOff(256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem))))
// Move more than 1024 bytes using a loop.
(Move [s] dst src mem) && s > 1024 && logLargeCopy(v, s) =>
(Zero [4] destptr mem) => (MOVWstoreconst [0] destptr mem)
(Zero [8] destptr mem) => (MOVDstoreconst [0] destptr mem)
(Zero [3] destptr mem) =>
- (MOVBstoreconst [makeValAndOff32(0,2)] destptr
+ (MOVBstoreconst [makeValAndOff(0,2)] destptr
(MOVHstoreconst [0] destptr mem))
(Zero [5] destptr mem) =>
- (MOVBstoreconst [makeValAndOff32(0,4)] destptr
+ (MOVBstoreconst [makeValAndOff(0,4)] destptr
(MOVWstoreconst [0] destptr mem))
(Zero [6] destptr mem) =>
- (MOVHstoreconst [makeValAndOff32(0,4)] destptr
+ (MOVHstoreconst [makeValAndOff(0,4)] destptr
(MOVWstoreconst [0] destptr mem))
(Zero [7] destptr mem) =>
- (MOVWstoreconst [makeValAndOff32(0,3)] destptr
+ (MOVWstoreconst [makeValAndOff(0,3)] destptr
(MOVWstoreconst [0] destptr mem))
(Zero [s] destptr mem) && s > 0 && s <= 1024 =>
- (CLEAR [makeValAndOff32(int32(s), 0)] destptr mem)
+ (CLEAR [makeValAndOff(int32(s), 0)] destptr mem)
// Zero more than 1024 bytes using a loop.
(Zero [s] destptr mem) && s > 1024 =>
// Fold constants into stores.
(MOVDstore [off] {sym} ptr (MOVDconst [c]) mem) && is16Bit(c) && isU12Bit(int64(off)) && ptr.Op != OpSB =>
- (MOVDstoreconst [makeValAndOff32(int32(c),off)] {sym} ptr mem)
+ (MOVDstoreconst [makeValAndOff(int32(c),off)] {sym} ptr mem)
(MOVWstore [off] {sym} ptr (MOVDconst [c]) mem) && is16Bit(c) && isU12Bit(int64(off)) && ptr.Op != OpSB =>
- (MOVWstoreconst [makeValAndOff32(int32(c),off)] {sym} ptr mem)
+ (MOVWstoreconst [makeValAndOff(int32(c),off)] {sym} ptr mem)
(MOVHstore [off] {sym} ptr (MOVDconst [c]) mem) && isU12Bit(int64(off)) && ptr.Op != OpSB =>
- (MOVHstoreconst [makeValAndOff32(int32(int16(c)),off)] {sym} ptr mem)
+ (MOVHstoreconst [makeValAndOff(int32(int16(c)),off)] {sym} ptr mem)
(MOVBstore [off] {sym} ptr (MOVDconst [c]) mem) && is20Bit(int64(off)) && ptr.Op != OpSB =>
- (MOVBstoreconst [makeValAndOff32(int32(int8(c)),off)] {sym} ptr mem)
+ (MOVBstoreconst [makeValAndOff(int32(int8(c)),off)] {sym} ptr mem)
// Fold address offsets into constant stores.
-(MOVDstoreconst [sc] {s} (ADDconst [off] ptr) mem) && isU12Bit(sc.Off()+int64(off)) =>
+(MOVDstoreconst [sc] {s} (ADDconst [off] ptr) mem) && isU12Bit(sc.Off64()+int64(off)) =>
(MOVDstoreconst [sc.addOffset32(off)] {s} ptr mem)
-(MOVWstoreconst [sc] {s} (ADDconst [off] ptr) mem) && isU12Bit(sc.Off()+int64(off)) =>
+(MOVWstoreconst [sc] {s} (ADDconst [off] ptr) mem) && isU12Bit(sc.Off64()+int64(off)) =>
(MOVWstoreconst [sc.addOffset32(off)] {s} ptr mem)
-(MOVHstoreconst [sc] {s} (ADDconst [off] ptr) mem) && isU12Bit(sc.Off()+int64(off)) =>
+(MOVHstoreconst [sc] {s} (ADDconst [off] ptr) mem) && isU12Bit(sc.Off64()+int64(off)) =>
(MOVHstoreconst [sc.addOffset32(off)] {s} ptr mem)
-(MOVBstoreconst [sc] {s} (ADDconst [off] ptr) mem) && is20Bit(sc.Off()+int64(off)) =>
+(MOVBstoreconst [sc] {s} (ADDconst [off] ptr) mem) && is20Bit(sc.Off64()+int64(off)) =>
(MOVBstoreconst [sc.addOffset32(off)] {s} ptr mem)
// Merge address calculations into loads and stores.
&& x.Uses == 1
&& a.Off() + 1 == c.Off()
&& clobber(x)
- => (MOVHstoreconst [makeValAndOff32(c.Val32()&0xff | a.Val32()<<8, a.Off32())] {s} p mem)
+ => (MOVHstoreconst [makeValAndOff(c.Val()&0xff | a.Val()<<8, a.Off())] {s} p mem)
(MOVHstoreconst [c] {s} p x:(MOVHstoreconst [a] {s} p mem))
&& p.Op != OpSB
&& x.Uses == 1
&& a.Off() + 2 == c.Off()
&& clobber(x)
- => (MOVWstore [a.Off32()] {s} p (MOVDconst [int64(c.Val32()&0xffff | a.Val32()<<16)]) mem)
+ => (MOVWstore [a.Off()] {s} p (MOVDconst [int64(c.Val()&0xffff | a.Val()<<16)]) mem)
(MOVWstoreconst [c] {s} p x:(MOVWstoreconst [a] {s} p mem))
&& p.Op != OpSB
&& x.Uses == 1
&& a.Off() + 4 == c.Off()
&& clobber(x)
- => (MOVDstore [a.Off32()] {s} p (MOVDconst [c.Val()&0xffffffff | a.Val()<<32]) mem)
+ => (MOVDstore [a.Off()] {s} p (MOVDconst [c.Val64()&0xffffffff | a.Val64()<<32]) mem)
// Combine stores into larger (unaligned) stores.
// It doesn't work on global data (based on SB) because stores with relative addressing
// The low 32 bits hold a pointer offset.
type ValAndOff int64
-func (x ValAndOff) Val() int64 { return int64(x) >> 32 }
-func (x ValAndOff) Val32() int32 { return int32(int64(x) >> 32) }
+func (x ValAndOff) Val() int32 { return int32(int64(x) >> 32) }
+func (x ValAndOff) Val64() int64 { return int64(x) >> 32 }
func (x ValAndOff) Val16() int16 { return int16(int64(x) >> 32) }
func (x ValAndOff) Val8() int8 { return int8(int64(x) >> 32) }
-func (x ValAndOff) Off() int64 { return int64(int32(x)) }
-func (x ValAndOff) Off32() int32 { return int32(x) }
+func (x ValAndOff) Off64() int64 { return int64(int32(x)) }
+func (x ValAndOff) Off() int32 { return int32(x) }
func (x ValAndOff) String() string {
return fmt.Sprintf("val=%d,off=%d", x.Val(), x.Off())
return val == int64(int32(val))
}
-// validOff reports whether the offset can be used
-// as an argument to makeValAndOff.
-func validOff(off int64) bool {
- return off == int64(int32(off))
-}
-
-// validValAndOff reports whether we can fit the value and offset into
-// a ValAndOff value.
-func validValAndOff(val, off int64) bool {
- if !validVal(val) {
- return false
- }
- if !validOff(off) {
- return false
- }
- return true
-}
-
-func makeValAndOff32(val, off int32) ValAndOff {
+func makeValAndOff(val, off int32) ValAndOff {
return ValAndOff(int64(val)<<32 + int64(uint32(off)))
}
-func makeValAndOff64(val, off int64) ValAndOff {
- if !validValAndOff(val, off) {
- panic("invalid makeValAndOff64")
- }
- return ValAndOff(val<<32 + int64(uint32(off)))
-}
func (x ValAndOff) canAdd32(off int32) bool {
- newoff := x.Off() + int64(off)
+ newoff := x.Off64() + int64(off)
return newoff == int64(int32(newoff))
}
func (x ValAndOff) canAdd64(off int64) bool {
- newoff := x.Off() + off
+ newoff := x.Off64() + off
return newoff == int64(int32(newoff))
}
if !x.canAdd32(off) {
panic("invalid ValAndOff.addOffset32")
}
- return makeValAndOff64(x.Val(), x.Off()+int64(off))
+ return makeValAndOff(x.Val(), x.Off()+off)
}
func (x ValAndOff) addOffset64(off int64) ValAndOff {
if !x.canAdd64(off) {
panic("invalid ValAndOff.addOffset64")
}
- return makeValAndOff64(x.Val(), x.Off()+off)
+ return makeValAndOff(x.Val(), x.Off()+int32(off))
}
// int128 is a type that stores a 128-bit constant.
return true
}
// match: (CMPBconst l:(MOVBload {sym} [off] ptr mem) [c])
- // cond: l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l)
- // result: @l.Block (CMPBconstload {sym} [makeValAndOff32(int32(c),int32(off))] ptr mem)
+ // cond: l.Uses == 1 && clobber(l)
+ // result: @l.Block (CMPBconstload {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
c := auxIntToInt8(v.AuxInt)
l := v_0
sym := auxToSym(l.Aux)
mem := l.Args[1]
ptr := l.Args[0]
- if !(l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l)) {
+ if !(l.Uses == 1 && clobber(l)) {
break
}
b = l.Block
v0 := b.NewValue0(l.Pos, Op386CMPBconstload, types.TypeFlags)
v.copyOf(v0)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), int32(off)))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
return true
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (CMPBload {sym} [off] ptr (MOVLconst [c]) mem)
- // cond: validValAndOff(int64(int8(c)),int64(off))
- // result: (CMPBconstload {sym} [makeValAndOff32(int32(int8(c)),off)] ptr mem)
+ // result: (CMPBconstload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
c := auxIntToInt32(v_1.AuxInt)
mem := v_2
- if !(validValAndOff(int64(int8(c)), int64(off))) {
- break
- }
v.reset(Op386CMPBconstload)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(int8(c)), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(int8(c)), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
return true
}
// match: (CMPLconst l:(MOVLload {sym} [off] ptr mem) [c])
- // cond: l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l)
- // result: @l.Block (CMPLconstload {sym} [makeValAndOff32(int32(c),int32(off))] ptr mem)
+ // cond: l.Uses == 1 && clobber(l)
+ // result: @l.Block (CMPLconstload {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
c := auxIntToInt32(v.AuxInt)
l := v_0
sym := auxToSym(l.Aux)
mem := l.Args[1]
ptr := l.Args[0]
- if !(l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l)) {
+ if !(l.Uses == 1 && clobber(l)) {
break
}
b = l.Block
v0 := b.NewValue0(l.Pos, Op386CMPLconstload, types.TypeFlags)
v.copyOf(v0)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), int32(off)))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
return true
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (CMPLload {sym} [off] ptr (MOVLconst [c]) mem)
- // cond: validValAndOff(int64(c),int64(off))
- // result: (CMPLconstload {sym} [makeValAndOff32(c,off)] ptr mem)
+ // result: (CMPLconstload {sym} [makeValAndOff(c,off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
c := auxIntToInt32(v_1.AuxInt)
mem := v_2
- if !(validValAndOff(int64(c), int64(off))) {
- break
- }
v.reset(Op386CMPLconstload)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(c, off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(c, off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
return true
}
// match: (CMPWconst l:(MOVWload {sym} [off] ptr mem) [c])
- // cond: l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l)
- // result: @l.Block (CMPWconstload {sym} [makeValAndOff32(int32(c),int32(off))] ptr mem)
+ // cond: l.Uses == 1 && clobber(l)
+ // result: @l.Block (CMPWconstload {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
c := auxIntToInt16(v.AuxInt)
l := v_0
sym := auxToSym(l.Aux)
mem := l.Args[1]
ptr := l.Args[0]
- if !(l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l)) {
+ if !(l.Uses == 1 && clobber(l)) {
break
}
b = l.Block
v0 := b.NewValue0(l.Pos, Op386CMPWconstload, types.TypeFlags)
v.copyOf(v0)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), int32(off)))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
return true
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (CMPWload {sym} [off] ptr (MOVLconst [c]) mem)
- // cond: validValAndOff(int64(int16(c)),int64(off))
- // result: (CMPWconstload {sym} [makeValAndOff32(int32(int16(c)),off)] ptr mem)
+ // result: (CMPWconstload {sym} [makeValAndOff(int32(int16(c)),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
c := auxIntToInt32(v_1.AuxInt)
mem := v_2
- if !(validValAndOff(int64(int16(c)), int64(off))) {
- break
- }
v.reset(Op386CMPWconstload)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(int16(c)), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(int16(c)), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
return true
}
// match: (MOVBstore [off] {sym} ptr (MOVLconst [c]) mem)
- // cond: validOff(int64(off))
- // result: (MOVBstoreconst [makeValAndOff32(c,off)] {sym} ptr mem)
+ // result: (MOVBstoreconst [makeValAndOff(c,off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
c := auxIntToInt32(v_1.AuxInt)
mem := v_2
- if !(validOff(int64(off))) {
- break
- }
v.reset(Op386MOVBstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(c, off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(c, off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVBstoreconst [c] {s} p x:(MOVBstoreconst [a] {s} p mem))
// cond: x.Uses == 1 && a.Off() + 1 == c.Off() && clobber(x)
- // result: (MOVWstoreconst [makeValAndOff32(int32(a.Val()&0xff | c.Val()<<8), a.Off32())] {s} p mem)
+ // result: (MOVWstoreconst [makeValAndOff(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p mem)
for {
c := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(Op386MOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(a.Val()&0xff|c.Val()<<8), a.Off32()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(a.Val()&0xff|c.Val()<<8, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p, mem)
return true
}
// match: (MOVBstoreconst [a] {s} p x:(MOVBstoreconst [c] {s} p mem))
// cond: x.Uses == 1 && a.Off() + 1 == c.Off() && clobber(x)
- // result: (MOVWstoreconst [makeValAndOff32(int32(a.Val()&0xff | c.Val()<<8), a.Off32())] {s} p mem)
+ // result: (MOVWstoreconst [makeValAndOff(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p mem)
for {
a := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(Op386MOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(a.Val()&0xff|c.Val()<<8), a.Off32()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(a.Val()&0xff|c.Val()<<8, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p, mem)
return true
}
// match: (MOVBstoreconst [c] {s} p1 x:(MOVBstoreconst [a] {s} p0 mem))
// cond: x.Uses == 1 && a.Off() == c.Off() && sequentialAddresses(p0, p1, 1) && clobber(x)
- // result: (MOVWstoreconst [makeValAndOff32(int32(a.Val()&0xff | c.Val()<<8), a.Off32())] {s} p0 mem)
+ // result: (MOVWstoreconst [makeValAndOff(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p0 mem)
for {
c := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(Op386MOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(a.Val()&0xff|c.Val()<<8), a.Off32()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(a.Val()&0xff|c.Val()<<8, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p0, mem)
return true
}
// match: (MOVBstoreconst [a] {s} p0 x:(MOVBstoreconst [c] {s} p1 mem))
// cond: x.Uses == 1 && a.Off() == c.Off() && sequentialAddresses(p0, p1, 1) && clobber(x)
- // result: (MOVWstoreconst [makeValAndOff32(int32(a.Val()&0xff | c.Val()<<8), a.Off32())] {s} p0 mem)
+ // result: (MOVWstoreconst [makeValAndOff(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p0 mem)
for {
a := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(Op386MOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(a.Val()&0xff|c.Val()<<8), a.Off32()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(a.Val()&0xff|c.Val()<<8, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p0, mem)
return true
return true
}
// match: (MOVLstore [off] {sym} ptr (MOVLconst [c]) mem)
- // cond: validOff(int64(off))
- // result: (MOVLstoreconst [makeValAndOff32(c,off)] {sym} ptr mem)
+ // result: (MOVLstoreconst [makeValAndOff(c,off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
c := auxIntToInt32(v_1.AuxInt)
mem := v_2
- if !(validOff(int64(off))) {
- break
- }
v.reset(Op386MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(c, off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(c, off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
break
}
// match: (MOVLstore {sym} [off] ptr y:(ADDLconst [c] l:(MOVLload [off] {sym} ptr mem)) mem)
- // cond: y.Uses==1 && l.Uses==1 && clobber(y, l) && validValAndOff(int64(c),int64(off))
- // result: (ADDLconstmodify [makeValAndOff32(c,off)] {sym} ptr mem)
+ // cond: y.Uses==1 && l.Uses==1 && clobber(y, l)
+ // result: (ADDLconstmodify [makeValAndOff(c,off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
mem := l.Args[1]
- if ptr != l.Args[0] || mem != v_2 || !(y.Uses == 1 && l.Uses == 1 && clobber(y, l) && validValAndOff(int64(c), int64(off))) {
+ if ptr != l.Args[0] || mem != v_2 || !(y.Uses == 1 && l.Uses == 1 && clobber(y, l)) {
break
}
v.reset(Op386ADDLconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(c, off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(c, off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVLstore {sym} [off] ptr y:(ANDLconst [c] l:(MOVLload [off] {sym} ptr mem)) mem)
- // cond: y.Uses==1 && l.Uses==1 && clobber(y, l) && validValAndOff(int64(c),int64(off))
- // result: (ANDLconstmodify [makeValAndOff32(c,off)] {sym} ptr mem)
+ // cond: y.Uses==1 && l.Uses==1 && clobber(y, l)
+ // result: (ANDLconstmodify [makeValAndOff(c,off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
mem := l.Args[1]
- if ptr != l.Args[0] || mem != v_2 || !(y.Uses == 1 && l.Uses == 1 && clobber(y, l) && validValAndOff(int64(c), int64(off))) {
+ if ptr != l.Args[0] || mem != v_2 || !(y.Uses == 1 && l.Uses == 1 && clobber(y, l)) {
break
}
v.reset(Op386ANDLconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(c, off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(c, off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVLstore {sym} [off] ptr y:(ORLconst [c] l:(MOVLload [off] {sym} ptr mem)) mem)
- // cond: y.Uses==1 && l.Uses==1 && clobber(y, l) && validValAndOff(int64(c),int64(off))
- // result: (ORLconstmodify [makeValAndOff32(c,off)] {sym} ptr mem)
+ // cond: y.Uses==1 && l.Uses==1 && clobber(y, l)
+ // result: (ORLconstmodify [makeValAndOff(c,off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
mem := l.Args[1]
- if ptr != l.Args[0] || mem != v_2 || !(y.Uses == 1 && l.Uses == 1 && clobber(y, l) && validValAndOff(int64(c), int64(off))) {
+ if ptr != l.Args[0] || mem != v_2 || !(y.Uses == 1 && l.Uses == 1 && clobber(y, l)) {
break
}
v.reset(Op386ORLconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(c, off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(c, off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVLstore {sym} [off] ptr y:(XORLconst [c] l:(MOVLload [off] {sym} ptr mem)) mem)
- // cond: y.Uses==1 && l.Uses==1 && clobber(y, l) && validValAndOff(int64(c),int64(off))
- // result: (XORLconstmodify [makeValAndOff32(c,off)] {sym} ptr mem)
+ // cond: y.Uses==1 && l.Uses==1 && clobber(y, l)
+ // result: (XORLconstmodify [makeValAndOff(c,off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
mem := l.Args[1]
- if ptr != l.Args[0] || mem != v_2 || !(y.Uses == 1 && l.Uses == 1 && clobber(y, l) && validValAndOff(int64(c), int64(off))) {
+ if ptr != l.Args[0] || mem != v_2 || !(y.Uses == 1 && l.Uses == 1 && clobber(y, l)) {
break
}
v.reset(Op386XORLconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(c, off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(c, off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
return true
}
// match: (MOVWstore [off] {sym} ptr (MOVLconst [c]) mem)
- // cond: validOff(int64(off))
- // result: (MOVWstoreconst [makeValAndOff32(c,off)] {sym} ptr mem)
+ // result: (MOVWstoreconst [makeValAndOff(c,off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
c := auxIntToInt32(v_1.AuxInt)
mem := v_2
- if !(validOff(int64(off))) {
- break
- }
v.reset(Op386MOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(c, off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(c, off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVWstoreconst [c] {s} p x:(MOVWstoreconst [a] {s} p mem))
// cond: x.Uses == 1 && a.Off() + 2 == c.Off() && clobber(x)
- // result: (MOVLstoreconst [makeValAndOff32(int32(a.Val()&0xffff | c.Val()<<16), a.Off32())] {s} p mem)
+ // result: (MOVLstoreconst [makeValAndOff(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p mem)
for {
c := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(Op386MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(a.Val()&0xffff|c.Val()<<16), a.Off32()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(a.Val()&0xffff|c.Val()<<16, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p, mem)
return true
}
// match: (MOVWstoreconst [a] {s} p x:(MOVWstoreconst [c] {s} p mem))
// cond: x.Uses == 1 && ValAndOff(a).Off() + 2 == ValAndOff(c).Off() && clobber(x)
- // result: (MOVLstoreconst [makeValAndOff32(int32(a.Val()&0xffff | c.Val()<<16), a.Off32())] {s} p mem)
+ // result: (MOVLstoreconst [makeValAndOff(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p mem)
for {
a := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(Op386MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(a.Val()&0xffff|c.Val()<<16), a.Off32()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(a.Val()&0xffff|c.Val()<<16, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p, mem)
return true
}
// match: (MOVWstoreconst [c] {s} p1 x:(MOVWstoreconst [a] {s} p0 mem))
// cond: x.Uses == 1 && a.Off() == c.Off() && sequentialAddresses(p0, p1, 2) && clobber(x)
- // result: (MOVLstoreconst [makeValAndOff32(int32(a.Val()&0xffff | c.Val()<<16), a.Off32())] {s} p0 mem)
+ // result: (MOVLstoreconst [makeValAndOff(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p0 mem)
for {
c := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(Op386MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(a.Val()&0xffff|c.Val()<<16), a.Off32()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(a.Val()&0xffff|c.Val()<<16, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p0, mem)
return true
}
// match: (MOVWstoreconst [a] {s} p0 x:(MOVWstoreconst [c] {s} p1 mem))
// cond: x.Uses == 1 && a.Off() == c.Off() && sequentialAddresses(p0, p1, 2) && clobber(x)
- // result: (MOVLstoreconst [makeValAndOff32(int32(a.Val()&0xffff | c.Val()<<16), a.Off32())] {s} p0 mem)
+ // result: (MOVLstoreconst [makeValAndOff(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p0 mem)
for {
a := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(Op386MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(a.Val()&0xffff|c.Val()<<16), a.Off32()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(a.Val()&0xffff|c.Val()<<16, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p0, mem)
return true
return true
}
// match: (Zero [3] destptr mem)
- // result: (MOVBstoreconst [makeValAndOff32(0,2)] destptr (MOVWstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVWstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 3 {
break
destptr := v_0
mem := v_1
v.reset(Op386MOVBstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 2))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 2))
v0 := b.NewValue0(v.Pos, Op386MOVWstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v0.AddArg2(destptr, mem)
v.AddArg2(destptr, v0)
return true
}
// match: (Zero [5] destptr mem)
- // result: (MOVBstoreconst [makeValAndOff32(0,4)] destptr (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (MOVBstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 5 {
break
destptr := v_0
mem := v_1
v.reset(Op386MOVBstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 4))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 4))
v0 := b.NewValue0(v.Pos, Op386MOVLstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v0.AddArg2(destptr, mem)
v.AddArg2(destptr, v0)
return true
}
// match: (Zero [6] destptr mem)
- // result: (MOVWstoreconst [makeValAndOff32(0,4)] destptr (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (MOVWstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 6 {
break
destptr := v_0
mem := v_1
v.reset(Op386MOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 4))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 4))
v0 := b.NewValue0(v.Pos, Op386MOVLstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v0.AddArg2(destptr, mem)
v.AddArg2(destptr, v0)
return true
}
// match: (Zero [7] destptr mem)
- // result: (MOVLstoreconst [makeValAndOff32(0,3)] destptr (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (MOVLstoreconst [makeValAndOff(0,3)] destptr (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 7 {
break
destptr := v_0
mem := v_1
v.reset(Op386MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 3))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 3))
v0 := b.NewValue0(v.Pos, Op386MOVLstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v0.AddArg2(destptr, mem)
v.AddArg2(destptr, v0)
return true
return true
}
// match: (Zero [8] destptr mem)
- // result: (MOVLstoreconst [makeValAndOff32(0,4)] destptr (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (MOVLstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 8 {
break
destptr := v_0
mem := v_1
v.reset(Op386MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 4))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 4))
v0 := b.NewValue0(v.Pos, Op386MOVLstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v0.AddArg2(destptr, mem)
v.AddArg2(destptr, v0)
return true
}
// match: (Zero [12] destptr mem)
- // result: (MOVLstoreconst [makeValAndOff32(0,8)] destptr (MOVLstoreconst [makeValAndOff32(0,4)] destptr (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem)))
+ // result: (MOVLstoreconst [makeValAndOff(0,8)] destptr (MOVLstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [makeValAndOff(0,0)] destptr mem)))
for {
if auxIntToInt64(v.AuxInt) != 12 {
break
destptr := v_0
mem := v_1
v.reset(Op386MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 8))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 8))
v0 := b.NewValue0(v.Pos, Op386MOVLstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 4))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 4))
v1 := b.NewValue0(v.Pos, Op386MOVLstoreconst, types.TypeMem)
- v1.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v1.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v1.AddArg2(destptr, mem)
v0.AddArg2(destptr, v1)
v.AddArg2(destptr, v0)
return true
}
// match: (Zero [16] destptr mem)
- // result: (MOVLstoreconst [makeValAndOff32(0,12)] destptr (MOVLstoreconst [makeValAndOff32(0,8)] destptr (MOVLstoreconst [makeValAndOff32(0,4)] destptr (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))))
+ // result: (MOVLstoreconst [makeValAndOff(0,12)] destptr (MOVLstoreconst [makeValAndOff(0,8)] destptr (MOVLstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))))
for {
if auxIntToInt64(v.AuxInt) != 16 {
break
destptr := v_0
mem := v_1
v.reset(Op386MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 12))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 12))
v0 := b.NewValue0(v.Pos, Op386MOVLstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 8))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 8))
v1 := b.NewValue0(v.Pos, Op386MOVLstoreconst, types.TypeMem)
- v1.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 4))
+ v1.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 4))
v2 := b.NewValue0(v.Pos, Op386MOVLstoreconst, types.TypeMem)
- v2.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v2.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v2.AddArg2(destptr, mem)
v1.AddArg2(destptr, v2)
v0.AddArg2(destptr, v1)
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPBconstload {sym} [vo] ptr mem)
- // result: (CMPBconst (MOVBload {sym} [vo.Off32()] ptr mem) [vo.Val8()])
+ // result: (CMPBconst (MOVBload {sym} [vo.Off()] ptr mem) [vo.Val8()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
v.reset(Op386CMPBconst)
v.AuxInt = int8ToAuxInt(vo.Val8())
v0 := b.NewValue0(v.Pos, Op386MOVBload, typ.UInt8)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPLconstload {sym} [vo] ptr mem)
- // result: (CMPLconst (MOVLload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
+ // result: (CMPLconst (MOVLload {sym} [vo.Off()] ptr mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
mem := v_1
v.reset(Op386CMPLconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, Op386MOVLload, typ.UInt32)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPWconstload {sym} [vo] ptr mem)
- // result: (CMPWconst (MOVWload {sym} [vo.Off32()] ptr mem) [vo.Val16()])
+ // result: (CMPWconst (MOVWload {sym} [vo.Off()] ptr mem) [vo.Val16()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
v.reset(Op386CMPWconst)
v.AuxInt = int16ToAuxInt(vo.Val16())
v0 := b.NewValue0(v.Pos, Op386MOVWload, typ.UInt16)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
}
// match: (CMPBconst l:(MOVBload {sym} [off] ptr mem) [c])
// cond: l.Uses == 1 && clobber(l)
- // result: @l.Block (CMPBconstload {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // result: @l.Block (CMPBconstload {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
c := auxIntToInt8(v.AuxInt)
l := v_0
b = l.Block
v0 := b.NewValue0(l.Pos, OpAMD64CMPBconstload, types.TypeFlags)
v.copyOf(v0)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
return true
return true
}
// match: (CMPBload {sym} [off] ptr (MOVLconst [c]) mem)
- // cond: validValAndOff(int64(int8(c)),int64(off))
- // result: (CMPBconstload {sym} [makeValAndOff32(int32(int8(c)),off)] ptr mem)
+ // result: (CMPBconstload {sym} [makeValAndOff(int32(int8(c)),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
c := auxIntToInt32(v_1.AuxInt)
mem := v_2
- if !(validValAndOff(int64(int8(c)), int64(off))) {
- break
- }
v.reset(OpAMD64CMPBconstload)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(int8(c)), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(int8(c)), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (CMPLconst l:(MOVLload {sym} [off] ptr mem) [c])
// cond: l.Uses == 1 && clobber(l)
- // result: @l.Block (CMPLconstload {sym} [makeValAndOff32(c,off)] ptr mem)
+ // result: @l.Block (CMPLconstload {sym} [makeValAndOff(c,off)] ptr mem)
for {
c := auxIntToInt32(v.AuxInt)
l := v_0
b = l.Block
v0 := b.NewValue0(l.Pos, OpAMD64CMPLconstload, types.TypeFlags)
v.copyOf(v0)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(c, off))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(c, off))
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
return true
return true
}
// match: (CMPLload {sym} [off] ptr (MOVLconst [c]) mem)
- // cond: validValAndOff(int64(c),int64(off))
- // result: (CMPLconstload {sym} [makeValAndOff32(c,off)] ptr mem)
+ // result: (CMPLconstload {sym} [makeValAndOff(c,off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
c := auxIntToInt32(v_1.AuxInt)
mem := v_2
- if !(validValAndOff(int64(c), int64(off))) {
- break
- }
v.reset(OpAMD64CMPLconstload)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(c, off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(c, off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (CMPQconst l:(MOVQload {sym} [off] ptr mem) [c])
// cond: l.Uses == 1 && clobber(l)
- // result: @l.Block (CMPQconstload {sym} [makeValAndOff32(c,off)] ptr mem)
+ // result: @l.Block (CMPQconstload {sym} [makeValAndOff(c,off)] ptr mem)
for {
c := auxIntToInt32(v.AuxInt)
l := v_0
b = l.Block
v0 := b.NewValue0(l.Pos, OpAMD64CMPQconstload, types.TypeFlags)
v.copyOf(v0)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(c, off))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(c, off))
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
return true
return true
}
// match: (CMPQload {sym} [off] ptr (MOVQconst [c]) mem)
- // cond: validValAndOff(c,int64(off))
- // result: (CMPQconstload {sym} [makeValAndOff64(c,int64(off))] ptr mem)
+ // cond: validVal(c)
+ // result: (CMPQconstload {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
c := auxIntToInt64(v_1.AuxInt)
mem := v_2
- if !(validValAndOff(c, int64(off))) {
+ if !(validVal(c)) {
break
}
v.reset(OpAMD64CMPQconstload)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff64(c, int64(off)))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (CMPWconst l:(MOVWload {sym} [off] ptr mem) [c])
// cond: l.Uses == 1 && clobber(l)
- // result: @l.Block (CMPWconstload {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // result: @l.Block (CMPWconstload {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
c := auxIntToInt16(v.AuxInt)
l := v_0
b = l.Block
v0 := b.NewValue0(l.Pos, OpAMD64CMPWconstload, types.TypeFlags)
v.copyOf(v0)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
return true
return true
}
// match: (CMPWload {sym} [off] ptr (MOVLconst [c]) mem)
- // cond: validValAndOff(int64(int16(c)),int64(off))
- // result: (CMPWconstload {sym} [makeValAndOff32(int32(int16(c)),off)] ptr mem)
+ // result: (CMPWconstload {sym} [makeValAndOff(int32(int16(c)),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
c := auxIntToInt32(v_1.AuxInt)
mem := v_2
- if !(validValAndOff(int64(int16(c)), int64(off))) {
- break
- }
v.reset(OpAMD64CMPWconstload)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(int16(c)), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(int16(c)), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
return true
}
// match: (MOVBstore [off] {sym} ptr (MOVLconst [c]) mem)
- // result: (MOVBstoreconst [makeValAndOff32(int32(int8(c)),off)] {sym} ptr mem)
+ // result: (MOVBstoreconst [makeValAndOff(int32(int8(c)),off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
c := auxIntToInt32(v_1.AuxInt)
mem := v_2
v.reset(OpAMD64MOVBstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(int8(c)), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(int8(c)), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVBstore [off] {sym} ptr (MOVQconst [c]) mem)
- // result: (MOVBstoreconst [makeValAndOff32(int32(int8(c)),off)] {sym} ptr mem)
+ // result: (MOVBstoreconst [makeValAndOff(int32(int8(c)),off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
c := auxIntToInt64(v_1.AuxInt)
mem := v_2
v.reset(OpAMD64MOVBstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(int8(c)), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(int8(c)), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVBstoreconst [c] {s} p x:(MOVBstoreconst [a] {s} p mem))
// cond: x.Uses == 1 && a.Off() + 1 == c.Off() && clobber(x)
- // result: (MOVWstoreconst [makeValAndOff64(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p mem)
+ // result: (MOVWstoreconst [makeValAndOff(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p mem)
for {
c := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(OpAMD64MOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff64(a.Val()&0xff|c.Val()<<8, a.Off()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(a.Val()&0xff|c.Val()<<8, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p, mem)
return true
}
// match: (MOVBstoreconst [a] {s} p x:(MOVBstoreconst [c] {s} p mem))
// cond: x.Uses == 1 && a.Off() + 1 == c.Off() && clobber(x)
- // result: (MOVWstoreconst [makeValAndOff64(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p mem)
+ // result: (MOVWstoreconst [makeValAndOff(a.Val()&0xff | c.Val()<<8, a.Off())] {s} p mem)
for {
a := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(OpAMD64MOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff64(a.Val()&0xff|c.Val()<<8, a.Off()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(a.Val()&0xff|c.Val()<<8, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p, mem)
return true
return true
}
// match: (MOVLstore [off] {sym} ptr (MOVLconst [c]) mem)
- // result: (MOVLstoreconst [makeValAndOff32(int32(c),off)] {sym} ptr mem)
+ // result: (MOVLstoreconst [makeValAndOff(int32(c),off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
c := auxIntToInt32(v_1.AuxInt)
mem := v_2
v.reset(OpAMD64MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVLstore [off] {sym} ptr (MOVQconst [c]) mem)
- // result: (MOVLstoreconst [makeValAndOff32(int32(c),off)] {sym} ptr mem)
+ // result: (MOVLstoreconst [makeValAndOff(int32(c),off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
c := auxIntToInt64(v_1.AuxInt)
mem := v_2
v.reset(OpAMD64MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
return true
}
// match: (MOVLstore [off] {sym} ptr a:(ADDLconst [c] l:(MOVLload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (ADDLconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (ADDLconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64ADDLconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVLstore [off] {sym} ptr a:(ANDLconst [c] l:(MOVLload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (ANDLconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (ANDLconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64ANDLconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVLstore [off] {sym} ptr a:(ORLconst [c] l:(MOVLload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (ORLconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (ORLconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64ORLconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVLstore [off] {sym} ptr a:(XORLconst [c] l:(MOVLload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (XORLconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (XORLconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64XORLconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVLstore [off] {sym} ptr a:(BTCLconst [c] l:(MOVLload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (BTCLconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (BTCLconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64BTCLconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVLstore [off] {sym} ptr a:(BTRLconst [c] l:(MOVLload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (BTRLconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (BTRLconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64BTRLconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVLstore [off] {sym} ptr a:(BTSLconst [c] l:(MOVLload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (BTSLconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (BTSLconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64BTSLconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVLstoreconst [c] {s} p x:(MOVLstoreconst [a] {s} p mem))
// cond: x.Uses == 1 && a.Off() + 4 == c.Off() && clobber(x)
- // result: (MOVQstore [a.Off32()] {s} p (MOVQconst [a.Val()&0xffffffff | c.Val()<<32]) mem)
+ // result: (MOVQstore [a.Off()] {s} p (MOVQconst [a.Val64()&0xffffffff | c.Val64()<<32]) mem)
for {
c := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(OpAMD64MOVQstore)
- v.AuxInt = int32ToAuxInt(a.Off32())
+ v.AuxInt = int32ToAuxInt(a.Off())
v.Aux = symToAux(s)
v0 := b.NewValue0(x.Pos, OpAMD64MOVQconst, typ.UInt64)
- v0.AuxInt = int64ToAuxInt(a.Val()&0xffffffff | c.Val()<<32)
+ v0.AuxInt = int64ToAuxInt(a.Val64()&0xffffffff | c.Val64()<<32)
v.AddArg3(p, v0, mem)
return true
}
// match: (MOVLstoreconst [a] {s} p x:(MOVLstoreconst [c] {s} p mem))
// cond: x.Uses == 1 && a.Off() + 4 == c.Off() && clobber(x)
- // result: (MOVQstore [a.Off32()] {s} p (MOVQconst [a.Val()&0xffffffff | c.Val()<<32]) mem)
+ // result: (MOVQstore [a.Off()] {s} p (MOVQconst [a.Val64()&0xffffffff | c.Val64()<<32]) mem)
for {
a := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(OpAMD64MOVQstore)
- v.AuxInt = int32ToAuxInt(a.Off32())
+ v.AuxInt = int32ToAuxInt(a.Off())
v.Aux = symToAux(s)
v0 := b.NewValue0(x.Pos, OpAMD64MOVQconst, typ.UInt64)
- v0.AuxInt = int64ToAuxInt(a.Val()&0xffffffff | c.Val()<<32)
+ v0.AuxInt = int64ToAuxInt(a.Val64()&0xffffffff | c.Val64()<<32)
v.AddArg3(p, v0, mem)
return true
}
}
// match: (MOVQstore [off] {sym} ptr (MOVQconst [c]) mem)
// cond: validVal(c)
- // result: (MOVQstoreconst [makeValAndOff32(int32(c),off)] {sym} ptr mem)
+ // result: (MOVQstoreconst [makeValAndOff(int32(c),off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
v.reset(OpAMD64MOVQstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
return true
}
// match: (MOVQstore [off] {sym} ptr a:(ADDQconst [c] l:(MOVQload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (ADDQconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (ADDQconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64ADDQconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVQstore [off] {sym} ptr a:(ANDQconst [c] l:(MOVQload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (ANDQconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (ANDQconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64ANDQconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVQstore [off] {sym} ptr a:(ORQconst [c] l:(MOVQload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (ORQconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (ORQconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64ORQconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVQstore [off] {sym} ptr a:(XORQconst [c] l:(MOVQload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (XORQconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (XORQconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64XORQconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVQstore [off] {sym} ptr a:(BTCQconst [c] l:(MOVQload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (BTCQconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (BTCQconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64BTCQconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVQstore [off] {sym} ptr a:(BTRQconst [c] l:(MOVQload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (BTRQconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (BTRQconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64BTRQconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVQstore [off] {sym} ptr a:(BTSQconst [c] l:(MOVQload [off] {sym} ptr2 mem)) mem)
- // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c),int64(off)) && clobber(l, a)
- // result: (BTSQconstmodify {sym} [makeValAndOff32(int32(c),off)] ptr mem)
+ // cond: isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)
+ // result: (BTSQconstmodify {sym} [makeValAndOff(int32(c),off)] ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
}
mem := l.Args[1]
ptr2 := l.Args[0]
- if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(int64(c), int64(off)) && clobber(l, a)) {
+ if mem != v_2 || !(isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && clobber(l, a)) {
break
}
v.reset(OpAMD64BTSQconstmodify)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVQstoreconst [c] {s} p x:(MOVQstoreconst [c2] {s} p mem))
// cond: config.useSSE && x.Uses == 1 && c2.Off() + 8 == c.Off() && c.Val() == 0 && c2.Val() == 0 && clobber(x)
- // result: (MOVOstorezero [c2.Off32()] {s} p mem)
+ // result: (MOVOstorezero [c2.Off()] {s} p mem)
for {
c := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(OpAMD64MOVOstorezero)
- v.AuxInt = int32ToAuxInt(c2.Off32())
+ v.AuxInt = int32ToAuxInt(c2.Off())
v.Aux = symToAux(s)
v.AddArg2(p, mem)
return true
return true
}
// match: (MOVWstore [off] {sym} ptr (MOVLconst [c]) mem)
- // result: (MOVWstoreconst [makeValAndOff32(int32(int16(c)),off)] {sym} ptr mem)
+ // result: (MOVWstoreconst [makeValAndOff(int32(int16(c)),off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
c := auxIntToInt32(v_1.AuxInt)
mem := v_2
v.reset(OpAMD64MOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(int16(c)), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(int16(c)), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVWstore [off] {sym} ptr (MOVQconst [c]) mem)
- // result: (MOVWstoreconst [makeValAndOff32(int32(int16(c)),off)] {sym} ptr mem)
+ // result: (MOVWstoreconst [makeValAndOff(int32(int16(c)),off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
c := auxIntToInt64(v_1.AuxInt)
mem := v_2
v.reset(OpAMD64MOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(int16(c)), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(int16(c)), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
}
// match: (MOVWstoreconst [c] {s} p x:(MOVWstoreconst [a] {s} p mem))
// cond: x.Uses == 1 && a.Off() + 2 == c.Off() && clobber(x)
- // result: (MOVLstoreconst [makeValAndOff64(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p mem)
+ // result: (MOVLstoreconst [makeValAndOff(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p mem)
for {
c := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(OpAMD64MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff64(a.Val()&0xffff|c.Val()<<16, a.Off()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(a.Val()&0xffff|c.Val()<<16, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p, mem)
return true
}
// match: (MOVWstoreconst [a] {s} p x:(MOVWstoreconst [c] {s} p mem))
// cond: x.Uses == 1 && a.Off() + 2 == c.Off() && clobber(x)
- // result: (MOVLstoreconst [makeValAndOff64(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p mem)
+ // result: (MOVLstoreconst [makeValAndOff(a.Val()&0xffff | c.Val()<<16, a.Off())] {s} p mem)
for {
a := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(OpAMD64MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff64(a.Val()&0xffff|c.Val()<<16, a.Off()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(a.Val()&0xffff|c.Val()<<16, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p, mem)
return true
break
}
// match: (TESTB l:(MOVBload {sym} [off] ptr mem) l2)
- // cond: l == l2 && l.Uses == 2 && validValAndOff(0, int64(off)) && clobber(l)
- // result: @l.Block (CMPBconstload {sym} [makeValAndOff64(0, int64(off))] ptr mem)
+ // cond: l == l2 && l.Uses == 2 && clobber(l)
+ // result: @l.Block (CMPBconstload {sym} [makeValAndOff(0, off)] ptr mem)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
l := v_0
mem := l.Args[1]
ptr := l.Args[0]
l2 := v_1
- if !(l == l2 && l.Uses == 2 && validValAndOff(0, int64(off)) && clobber(l)) {
+ if !(l == l2 && l.Uses == 2 && clobber(l)) {
continue
}
b = l.Block
v0 := b.NewValue0(l.Pos, OpAMD64CMPBconstload, types.TypeFlags)
v.copyOf(v0)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff64(0, int64(off)))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, off))
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
return true
break
}
// match: (TESTL l:(MOVLload {sym} [off] ptr mem) l2)
- // cond: l == l2 && l.Uses == 2 && validValAndOff(0, int64(off)) && clobber(l)
- // result: @l.Block (CMPLconstload {sym} [makeValAndOff64(0, int64(off))] ptr mem)
+ // cond: l == l2 && l.Uses == 2 && clobber(l)
+ // result: @l.Block (CMPLconstload {sym} [makeValAndOff(0, off)] ptr mem)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
l := v_0
mem := l.Args[1]
ptr := l.Args[0]
l2 := v_1
- if !(l == l2 && l.Uses == 2 && validValAndOff(0, int64(off)) && clobber(l)) {
+ if !(l == l2 && l.Uses == 2 && clobber(l)) {
continue
}
b = l.Block
v0 := b.NewValue0(l.Pos, OpAMD64CMPLconstload, types.TypeFlags)
v.copyOf(v0)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff64(0, int64(off)))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, off))
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
return true
break
}
// match: (TESTQ l:(MOVQload {sym} [off] ptr mem) l2)
- // cond: l == l2 && l.Uses == 2 && validValAndOff(0, int64(off)) && clobber(l)
- // result: @l.Block (CMPQconstload {sym} [makeValAndOff64(0, int64(off))] ptr mem)
+ // cond: l == l2 && l.Uses == 2 && clobber(l)
+ // result: @l.Block (CMPQconstload {sym} [makeValAndOff(0, off)] ptr mem)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
l := v_0
mem := l.Args[1]
ptr := l.Args[0]
l2 := v_1
- if !(l == l2 && l.Uses == 2 && validValAndOff(0, int64(off)) && clobber(l)) {
+ if !(l == l2 && l.Uses == 2 && clobber(l)) {
continue
}
b = l.Block
v0 := b.NewValue0(l.Pos, OpAMD64CMPQconstload, types.TypeFlags)
v.copyOf(v0)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff64(0, int64(off)))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, off))
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
return true
break
}
// match: (TESTW l:(MOVWload {sym} [off] ptr mem) l2)
- // cond: l == l2 && l.Uses == 2 && validValAndOff(0, int64(off)) && clobber(l)
- // result: @l.Block (CMPWconstload {sym} [makeValAndOff64(0, int64(off))] ptr mem)
+ // cond: l == l2 && l.Uses == 2 && clobber(l)
+ // result: @l.Block (CMPWconstload {sym} [makeValAndOff(0, off)] ptr mem)
for {
for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
l := v_0
mem := l.Args[1]
ptr := l.Args[0]
l2 := v_1
- if !(l == l2 && l.Uses == 2 && validValAndOff(0, int64(off)) && clobber(l)) {
+ if !(l == l2 && l.Uses == 2 && clobber(l)) {
continue
}
b = l.Block
v0 := b.NewValue0(l.Pos, OpAMD64CMPWconstload, types.TypeFlags)
v.copyOf(v0)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff64(0, int64(off)))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, off))
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
return true
return true
}
// match: (Zero [1] destptr mem)
- // result: (MOVBstoreconst [makeValAndOff32(0,0)] destptr mem)
+ // result: (MOVBstoreconst [makeValAndOff(0,0)] destptr mem)
for {
if auxIntToInt64(v.AuxInt) != 1 {
break
destptr := v_0
mem := v_1
v.reset(OpAMD64MOVBstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v.AddArg2(destptr, mem)
return true
}
// match: (Zero [2] destptr mem)
- // result: (MOVWstoreconst [makeValAndOff32(0,0)] destptr mem)
+ // result: (MOVWstoreconst [makeValAndOff(0,0)] destptr mem)
for {
if auxIntToInt64(v.AuxInt) != 2 {
break
destptr := v_0
mem := v_1
v.reset(OpAMD64MOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v.AddArg2(destptr, mem)
return true
}
// match: (Zero [4] destptr mem)
- // result: (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem)
+ // result: (MOVLstoreconst [makeValAndOff(0,0)] destptr mem)
for {
if auxIntToInt64(v.AuxInt) != 4 {
break
destptr := v_0
mem := v_1
v.reset(OpAMD64MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v.AddArg2(destptr, mem)
return true
}
// match: (Zero [8] destptr mem)
- // result: (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem)
+ // result: (MOVQstoreconst [makeValAndOff(0,0)] destptr mem)
for {
if auxIntToInt64(v.AuxInt) != 8 {
break
destptr := v_0
mem := v_1
v.reset(OpAMD64MOVQstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v.AddArg2(destptr, mem)
return true
}
// match: (Zero [3] destptr mem)
- // result: (MOVBstoreconst [makeValAndOff32(0,2)] destptr (MOVWstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVWstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 3 {
break
destptr := v_0
mem := v_1
v.reset(OpAMD64MOVBstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 2))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 2))
v0 := b.NewValue0(v.Pos, OpAMD64MOVWstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v0.AddArg2(destptr, mem)
v.AddArg2(destptr, v0)
return true
}
// match: (Zero [5] destptr mem)
- // result: (MOVBstoreconst [makeValAndOff32(0,4)] destptr (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (MOVBstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 5 {
break
destptr := v_0
mem := v_1
v.reset(OpAMD64MOVBstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 4))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 4))
v0 := b.NewValue0(v.Pos, OpAMD64MOVLstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v0.AddArg2(destptr, mem)
v.AddArg2(destptr, v0)
return true
}
// match: (Zero [6] destptr mem)
- // result: (MOVWstoreconst [makeValAndOff32(0,4)] destptr (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (MOVWstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 6 {
break
destptr := v_0
mem := v_1
v.reset(OpAMD64MOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 4))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 4))
v0 := b.NewValue0(v.Pos, OpAMD64MOVLstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v0.AddArg2(destptr, mem)
v.AddArg2(destptr, v0)
return true
}
// match: (Zero [7] destptr mem)
- // result: (MOVLstoreconst [makeValAndOff32(0,3)] destptr (MOVLstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (MOVLstoreconst [makeValAndOff(0,3)] destptr (MOVLstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 7 {
break
destptr := v_0
mem := v_1
v.reset(OpAMD64MOVLstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 3))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 3))
v0 := b.NewValue0(v.Pos, OpAMD64MOVLstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v0.AddArg2(destptr, mem)
v.AddArg2(destptr, v0)
return true
}
// match: (Zero [s] destptr mem)
// cond: s%8 != 0 && s > 8 && !config.useSSE
- // result: (Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8]) (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8]) (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
for {
s := auxIntToInt64(v.AuxInt)
destptr := v_0
v0.AuxInt = int64ToAuxInt(s % 8)
v0.AddArg(destptr)
v1 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v1.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v1.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v1.AddArg2(destptr, mem)
v.AddArg2(v0, v1)
return true
}
// match: (Zero [16] destptr mem)
// cond: !config.useSSE
- // result: (MOVQstoreconst [makeValAndOff32(0,8)] destptr (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 16 {
break
break
}
v.reset(OpAMD64MOVQstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 8))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 8))
v0 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v0.AddArg2(destptr, mem)
v.AddArg2(destptr, v0)
return true
}
// match: (Zero [24] destptr mem)
// cond: !config.useSSE
- // result: (MOVQstoreconst [makeValAndOff32(0,16)] destptr (MOVQstoreconst [makeValAndOff32(0,8)] destptr (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem)))
+ // result: (MOVQstoreconst [makeValAndOff(0,16)] destptr (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [makeValAndOff(0,0)] destptr mem)))
for {
if auxIntToInt64(v.AuxInt) != 24 {
break
break
}
v.reset(OpAMD64MOVQstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 16))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 16))
v0 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 8))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 8))
v1 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v1.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v1.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v1.AddArg2(destptr, mem)
v0.AddArg2(destptr, v1)
v.AddArg2(destptr, v0)
}
// match: (Zero [32] destptr mem)
// cond: !config.useSSE
- // result: (MOVQstoreconst [makeValAndOff32(0,24)] destptr (MOVQstoreconst [makeValAndOff32(0,16)] destptr (MOVQstoreconst [makeValAndOff32(0,8)] destptr (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem))))
+ // result: (MOVQstoreconst [makeValAndOff(0,24)] destptr (MOVQstoreconst [makeValAndOff(0,16)] destptr (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))))
for {
if auxIntToInt64(v.AuxInt) != 32 {
break
break
}
v.reset(OpAMD64MOVQstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 24))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 24))
v0 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 16))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 16))
v1 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v1.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 8))
+ v1.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 8))
v2 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v2.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v2.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v2.AddArg2(destptr, mem)
v1.AddArg2(destptr, v2)
v0.AddArg2(destptr, v1)
}
// match: (Zero [s] destptr mem)
// cond: s > 8 && s < 16 && config.useSSE
- // result: (MOVQstoreconst [makeValAndOff32(0,int32(s-8))] destptr (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (MOVQstoreconst [makeValAndOff(0,int32(s-8))] destptr (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
for {
s := auxIntToInt64(v.AuxInt)
destptr := v_0
break
}
v.reset(OpAMD64MOVQstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, int32(s-8)))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, int32(s-8)))
v0 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v0.AddArg2(destptr, mem)
v.AddArg2(destptr, v0)
return true
}
// match: (Zero [s] destptr mem)
// cond: s%16 != 0 && s > 16 && s%16 <= 8 && config.useSSE
- // result: (Zero [s-s%16] (OffPtr <destptr.Type> destptr [s%16]) (MOVQstoreconst [makeValAndOff32(0,0)] destptr mem))
+ // result: (Zero [s-s%16] (OffPtr <destptr.Type> destptr [s%16]) (MOVQstoreconst [makeValAndOff(0,0)] destptr mem))
for {
s := auxIntToInt64(v.AuxInt)
destptr := v_0
v0.AuxInt = int64ToAuxInt(s % 16)
v0.AddArg(destptr)
v1 := b.NewValue0(v.Pos, OpAMD64MOVQstoreconst, types.TypeMem)
- v1.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 0))
+ v1.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 0))
v1.AddArg2(destptr, mem)
v.AddArg2(v0, v1)
return true
typ := &b.Func.Config.Types
// match: (CMPBconstload {sym} [vo] ptr mem)
// cond: vo.Val() == 0
- // result: (TESTB x:(MOVBload {sym} [vo.Off32()] ptr mem) x)
+ // result: (TESTB x:(MOVBload {sym} [vo.Off()] ptr mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
}
v.reset(OpAMD64TESTB)
x := b.NewValue0(v.Pos, OpAMD64MOVBload, typ.UInt8)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg2(ptr, mem)
v.AddArg2(x, x)
}
// match: (CMPBconstload {sym} [vo] ptr mem)
// cond: vo.Val() != 0
- // result: (CMPBconst (MOVBload {sym} [vo.Off32()] ptr mem) [vo.Val8()])
+ // result: (CMPBconst (MOVBload {sym} [vo.Off()] ptr mem) [vo.Val8()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
v.reset(OpAMD64CMPBconst)
v.AuxInt = int8ToAuxInt(vo.Val8())
v0 := b.NewValue0(v.Pos, OpAMD64MOVBload, typ.UInt8)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
typ := &b.Func.Config.Types
// match: (CMPBconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTB x:(MOVBloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTB x:(MOVBloadidx1 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
}
v.reset(OpAMD64TESTB)
x := b.NewValue0(v.Pos, OpAMD64MOVBloadidx1, typ.UInt8)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
}
// match: (CMPBconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPBconst (MOVBloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val8()])
+ // result: (CMPBconst (MOVBloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val8()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
v.reset(OpAMD64CMPBconst)
v.AuxInt = int8ToAuxInt(vo.Val8())
v0 := b.NewValue0(v.Pos, OpAMD64MOVBloadidx1, typ.UInt8)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
typ := &b.Func.Config.Types
// match: (CMPLconstload {sym} [vo] ptr mem)
// cond: vo.Val() == 0
- // result: (TESTL x:(MOVLload {sym} [vo.Off32()] ptr mem) x)
+ // result: (TESTL x:(MOVLload {sym} [vo.Off()] ptr mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
}
v.reset(OpAMD64TESTL)
x := b.NewValue0(v.Pos, OpAMD64MOVLload, typ.UInt32)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg2(ptr, mem)
v.AddArg2(x, x)
}
// match: (CMPLconstload {sym} [vo] ptr mem)
// cond: vo.Val() != 0
- // result: (CMPLconst (MOVLload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
+ // result: (CMPLconst (MOVLload {sym} [vo.Off()] ptr mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
v.reset(OpAMD64CMPLconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, OpAMD64MOVLload, typ.UInt32)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
typ := &b.Func.Config.Types
// match: (CMPLconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTL x:(MOVLloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTL x:(MOVLloadidx1 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
}
v.reset(OpAMD64TESTL)
x := b.NewValue0(v.Pos, OpAMD64MOVLloadidx1, typ.UInt32)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
}
// match: (CMPLconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPLconst (MOVLloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
+ // result: (CMPLconst (MOVLloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
v.reset(OpAMD64CMPLconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx1, typ.UInt32)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
typ := &b.Func.Config.Types
// match: (CMPLconstloadidx4 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTL x:(MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTL x:(MOVLloadidx4 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
}
v.reset(OpAMD64TESTL)
x := b.NewValue0(v.Pos, OpAMD64MOVLloadidx4, typ.UInt32)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
}
// match: (CMPLconstloadidx4 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPLconst (MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
+ // result: (CMPLconst (MOVLloadidx4 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
v.reset(OpAMD64CMPLconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx4, typ.UInt32)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
typ := &b.Func.Config.Types
// match: (CMPQconstload {sym} [vo] ptr mem)
// cond: vo.Val() == 0
- // result: (TESTQ x:(MOVQload {sym} [vo.Off32()] ptr mem) x)
+ // result: (TESTQ x:(MOVQload {sym} [vo.Off()] ptr mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
}
v.reset(OpAMD64TESTQ)
x := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg2(ptr, mem)
v.AddArg2(x, x)
}
// match: (CMPQconstload {sym} [vo] ptr mem)
// cond: vo.Val() != 0
- // result: (CMPQconst (MOVQload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
+ // result: (CMPQconst (MOVQload {sym} [vo.Off()] ptr mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
v.reset(OpAMD64CMPQconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
typ := &b.Func.Config.Types
// match: (CMPQconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTQ x:(MOVQloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTQ x:(MOVQloadidx1 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
}
v.reset(OpAMD64TESTQ)
x := b.NewValue0(v.Pos, OpAMD64MOVQloadidx1, typ.UInt64)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
}
// match: (CMPQconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPQconst (MOVQloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
+ // result: (CMPQconst (MOVQloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
v.reset(OpAMD64CMPQconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx1, typ.UInt64)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
typ := &b.Func.Config.Types
// match: (CMPQconstloadidx8 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTQ x:(MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTQ x:(MOVQloadidx8 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
}
v.reset(OpAMD64TESTQ)
x := b.NewValue0(v.Pos, OpAMD64MOVQloadidx8, typ.UInt64)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
}
// match: (CMPQconstloadidx8 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPQconst (MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
+ // result: (CMPQconst (MOVQloadidx8 {sym} [vo.Off()] ptr idx mem) [vo.Val()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
v.reset(OpAMD64CMPQconst)
- v.AuxInt = int32ToAuxInt(vo.Val32())
+ v.AuxInt = int32ToAuxInt(vo.Val())
v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx8, typ.UInt64)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
typ := &b.Func.Config.Types
// match: (CMPWconstload {sym} [vo] ptr mem)
// cond: vo.Val() == 0
- // result: (TESTW x:(MOVWload {sym} [vo.Off32()] ptr mem) x)
+ // result: (TESTW x:(MOVWload {sym} [vo.Off()] ptr mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
}
v.reset(OpAMD64TESTW)
x := b.NewValue0(v.Pos, OpAMD64MOVWload, typ.UInt16)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg2(ptr, mem)
v.AddArg2(x, x)
}
// match: (CMPWconstload {sym} [vo] ptr mem)
// cond: vo.Val() != 0
- // result: (CMPWconst (MOVWload {sym} [vo.Off32()] ptr mem) [vo.Val16()])
+ // result: (CMPWconst (MOVWload {sym} [vo.Off()] ptr mem) [vo.Val16()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
v.reset(OpAMD64CMPWconst)
v.AuxInt = int16ToAuxInt(vo.Val16())
v0 := b.NewValue0(v.Pos, OpAMD64MOVWload, typ.UInt16)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
typ := &b.Func.Config.Types
// match: (CMPWconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTW x:(MOVWloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTW x:(MOVWloadidx1 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
}
v.reset(OpAMD64TESTW)
x := b.NewValue0(v.Pos, OpAMD64MOVWloadidx1, typ.UInt16)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
}
// match: (CMPWconstloadidx1 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPWconst (MOVWloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
+ // result: (CMPWconst (MOVWloadidx1 {sym} [vo.Off()] ptr idx mem) [vo.Val16()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
v.reset(OpAMD64CMPWconst)
v.AuxInt = int16ToAuxInt(vo.Val16())
v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx1, typ.UInt16)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
typ := &b.Func.Config.Types
// match: (CMPWconstloadidx2 {sym} [vo] ptr idx mem)
// cond: vo.Val() == 0
- // result: (TESTW x:(MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) x)
+ // result: (TESTW x:(MOVWloadidx2 {sym} [vo.Off()] ptr idx mem) x)
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
}
v.reset(OpAMD64TESTW)
x := b.NewValue0(v.Pos, OpAMD64MOVWloadidx2, typ.UInt16)
- x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.AuxInt = int32ToAuxInt(vo.Off())
x.Aux = symToAux(sym)
x.AddArg3(ptr, idx, mem)
v.AddArg2(x, x)
}
// match: (CMPWconstloadidx2 {sym} [vo] ptr idx mem)
// cond: vo.Val() != 0
- // result: (CMPWconst (MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
+ // result: (CMPWconst (MOVWloadidx2 {sym} [vo.Off()] ptr idx mem) [vo.Val16()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
v.reset(OpAMD64CMPWconst)
v.AuxInt = int16ToAuxInt(vo.Val16())
v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx2, typ.UInt16)
- v0.AuxInt = int32ToAuxInt(vo.Off32())
+ v0.AuxInt = int32ToAuxInt(vo.Off())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
}
// match: (Move [s] dst src mem)
// cond: s > 0 && s <= 256 && logLargeCopy(v, s)
- // result: (MVC [makeValAndOff32(int32(s), 0)] dst src mem)
+ // result: (MVC [makeValAndOff(int32(s), 0)] dst src mem)
for {
s := auxIntToInt64(v.AuxInt)
dst := v_0
break
}
v.reset(OpS390XMVC)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(s), 0))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s), 0))
v.AddArg3(dst, src, mem)
return true
}
// match: (Move [s] dst src mem)
// cond: s > 256 && s <= 512 && logLargeCopy(v, s)
- // result: (MVC [makeValAndOff32(int32(s)-256, 256)] dst src (MVC [makeValAndOff32(256, 0)] dst src mem))
+ // result: (MVC [makeValAndOff(int32(s)-256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem))
for {
s := auxIntToInt64(v.AuxInt)
dst := v_0
break
}
v.reset(OpS390XMVC)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(s)-256, 256))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s)-256, 256))
v0 := b.NewValue0(v.Pos, OpS390XMVC, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(256, 0))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(256, 0))
v0.AddArg3(dst, src, mem)
v.AddArg3(dst, src, v0)
return true
}
// match: (Move [s] dst src mem)
// cond: s > 512 && s <= 768 && logLargeCopy(v, s)
- // result: (MVC [makeValAndOff32(int32(s)-512, 512)] dst src (MVC [makeValAndOff32(256, 256)] dst src (MVC [makeValAndOff32(256, 0)] dst src mem)))
+ // result: (MVC [makeValAndOff(int32(s)-512, 512)] dst src (MVC [makeValAndOff(256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem)))
for {
s := auxIntToInt64(v.AuxInt)
dst := v_0
break
}
v.reset(OpS390XMVC)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(s)-512, 512))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s)-512, 512))
v0 := b.NewValue0(v.Pos, OpS390XMVC, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(256, 256))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(256, 256))
v1 := b.NewValue0(v.Pos, OpS390XMVC, types.TypeMem)
- v1.AuxInt = valAndOffToAuxInt(makeValAndOff32(256, 0))
+ v1.AuxInt = valAndOffToAuxInt(makeValAndOff(256, 0))
v1.AddArg3(dst, src, mem)
v0.AddArg3(dst, src, v1)
v.AddArg3(dst, src, v0)
}
// match: (Move [s] dst src mem)
// cond: s > 768 && s <= 1024 && logLargeCopy(v, s)
- // result: (MVC [makeValAndOff32(int32(s)-768, 768)] dst src (MVC [makeValAndOff32(256, 512)] dst src (MVC [makeValAndOff32(256, 256)] dst src (MVC [makeValAndOff32(256, 0)] dst src mem))))
+ // result: (MVC [makeValAndOff(int32(s)-768, 768)] dst src (MVC [makeValAndOff(256, 512)] dst src (MVC [makeValAndOff(256, 256)] dst src (MVC [makeValAndOff(256, 0)] dst src mem))))
for {
s := auxIntToInt64(v.AuxInt)
dst := v_0
break
}
v.reset(OpS390XMVC)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(s)-768, 768))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s)-768, 768))
v0 := b.NewValue0(v.Pos, OpS390XMVC, types.TypeMem)
- v0.AuxInt = valAndOffToAuxInt(makeValAndOff32(256, 512))
+ v0.AuxInt = valAndOffToAuxInt(makeValAndOff(256, 512))
v1 := b.NewValue0(v.Pos, OpS390XMVC, types.TypeMem)
- v1.AuxInt = valAndOffToAuxInt(makeValAndOff32(256, 256))
+ v1.AuxInt = valAndOffToAuxInt(makeValAndOff(256, 256))
v2 := b.NewValue0(v.Pos, OpS390XMVC, types.TypeMem)
- v2.AuxInt = valAndOffToAuxInt(makeValAndOff32(256, 0))
+ v2.AuxInt = valAndOffToAuxInt(makeValAndOff(256, 0))
v2.AddArg3(dst, src, mem)
v1.AddArg3(dst, src, v2)
v0.AddArg3(dst, src, v1)
}
// match: (MOVBstore [off] {sym} ptr (MOVDconst [c]) mem)
// cond: is20Bit(int64(off)) && ptr.Op != OpSB
- // result: (MOVBstoreconst [makeValAndOff32(int32(int8(c)),off)] {sym} ptr mem)
+ // result: (MOVBstoreconst [makeValAndOff(int32(int8(c)),off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
v.reset(OpS390XMOVBstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(int8(c)), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(int8(c)), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (MOVBstoreconst [sc] {s} (ADDconst [off] ptr) mem)
- // cond: is20Bit(sc.Off()+int64(off))
+ // cond: is20Bit(sc.Off64()+int64(off))
// result: (MOVBstoreconst [sc.addOffset32(off)] {s} ptr mem)
for {
sc := auxIntToValAndOff(v.AuxInt)
off := auxIntToInt32(v_0.AuxInt)
ptr := v_0.Args[0]
mem := v_1
- if !(is20Bit(sc.Off() + int64(off))) {
+ if !(is20Bit(sc.Off64() + int64(off))) {
break
}
v.reset(OpS390XMOVBstoreconst)
}
// match: (MOVBstoreconst [c] {s} p x:(MOVBstoreconst [a] {s} p mem))
// cond: p.Op != OpSB && x.Uses == 1 && a.Off() + 1 == c.Off() && clobber(x)
- // result: (MOVHstoreconst [makeValAndOff32(c.Val32()&0xff | a.Val32()<<8, a.Off32())] {s} p mem)
+ // result: (MOVHstoreconst [makeValAndOff(c.Val()&0xff | a.Val()<<8, a.Off())] {s} p mem)
for {
c := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(OpS390XMOVHstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(c.Val32()&0xff|a.Val32()<<8, a.Off32()))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(c.Val()&0xff|a.Val()<<8, a.Off()))
v.Aux = symToAux(s)
v.AddArg2(p, mem)
return true
}
// match: (MOVDstore [off] {sym} ptr (MOVDconst [c]) mem)
// cond: is16Bit(c) && isU12Bit(int64(off)) && ptr.Op != OpSB
- // result: (MOVDstoreconst [makeValAndOff32(int32(c),off)] {sym} ptr mem)
+ // result: (MOVDstoreconst [makeValAndOff(int32(c),off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
v.reset(OpS390XMOVDstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (MOVDstoreconst [sc] {s} (ADDconst [off] ptr) mem)
- // cond: isU12Bit(sc.Off()+int64(off))
+ // cond: isU12Bit(sc.Off64()+int64(off))
// result: (MOVDstoreconst [sc.addOffset32(off)] {s} ptr mem)
for {
sc := auxIntToValAndOff(v.AuxInt)
off := auxIntToInt32(v_0.AuxInt)
ptr := v_0.Args[0]
mem := v_1
- if !(isU12Bit(sc.Off() + int64(off))) {
+ if !(isU12Bit(sc.Off64() + int64(off))) {
break
}
v.reset(OpS390XMOVDstoreconst)
}
// match: (MOVHstore [off] {sym} ptr (MOVDconst [c]) mem)
// cond: isU12Bit(int64(off)) && ptr.Op != OpSB
- // result: (MOVHstoreconst [makeValAndOff32(int32(int16(c)),off)] {sym} ptr mem)
+ // result: (MOVHstoreconst [makeValAndOff(int32(int16(c)),off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
v.reset(OpS390XMOVHstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(int16(c)), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(int16(c)), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
b := v.Block
typ := &b.Func.Config.Types
// match: (MOVHstoreconst [sc] {s} (ADDconst [off] ptr) mem)
- // cond: isU12Bit(sc.Off()+int64(off))
+ // cond: isU12Bit(sc.Off64()+int64(off))
// result: (MOVHstoreconst [sc.addOffset32(off)] {s} ptr mem)
for {
sc := auxIntToValAndOff(v.AuxInt)
off := auxIntToInt32(v_0.AuxInt)
ptr := v_0.Args[0]
mem := v_1
- if !(isU12Bit(sc.Off() + int64(off))) {
+ if !(isU12Bit(sc.Off64() + int64(off))) {
break
}
v.reset(OpS390XMOVHstoreconst)
}
// match: (MOVHstoreconst [c] {s} p x:(MOVHstoreconst [a] {s} p mem))
// cond: p.Op != OpSB && x.Uses == 1 && a.Off() + 2 == c.Off() && clobber(x)
- // result: (MOVWstore [a.Off32()] {s} p (MOVDconst [int64(c.Val32()&0xffff | a.Val32()<<16)]) mem)
+ // result: (MOVWstore [a.Off()] {s} p (MOVDconst [int64(c.Val()&0xffff | a.Val()<<16)]) mem)
for {
c := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(OpS390XMOVWstore)
- v.AuxInt = int32ToAuxInt(a.Off32())
+ v.AuxInt = int32ToAuxInt(a.Off())
v.Aux = symToAux(s)
v0 := b.NewValue0(x.Pos, OpS390XMOVDconst, typ.UInt64)
- v0.AuxInt = int64ToAuxInt(int64(c.Val32()&0xffff | a.Val32()<<16))
+ v0.AuxInt = int64ToAuxInt(int64(c.Val()&0xffff | a.Val()<<16))
v.AddArg3(p, v0, mem)
return true
}
}
// match: (MOVWstore [off] {sym} ptr (MOVDconst [c]) mem)
// cond: is16Bit(c) && isU12Bit(int64(off)) && ptr.Op != OpSB
- // result: (MOVWstoreconst [makeValAndOff32(int32(c),off)] {sym} ptr mem)
+ // result: (MOVWstoreconst [makeValAndOff(int32(c),off)] {sym} ptr mem)
for {
off := auxIntToInt32(v.AuxInt)
sym := auxToSym(v.Aux)
break
}
v.reset(OpS390XMOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(c), off))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(c), off))
v.Aux = symToAux(sym)
v.AddArg2(ptr, mem)
return true
b := v.Block
typ := &b.Func.Config.Types
// match: (MOVWstoreconst [sc] {s} (ADDconst [off] ptr) mem)
- // cond: isU12Bit(sc.Off()+int64(off))
+ // cond: isU12Bit(sc.Off64()+int64(off))
// result: (MOVWstoreconst [sc.addOffset32(off)] {s} ptr mem)
for {
sc := auxIntToValAndOff(v.AuxInt)
off := auxIntToInt32(v_0.AuxInt)
ptr := v_0.Args[0]
mem := v_1
- if !(isU12Bit(sc.Off() + int64(off))) {
+ if !(isU12Bit(sc.Off64() + int64(off))) {
break
}
v.reset(OpS390XMOVWstoreconst)
}
// match: (MOVWstoreconst [c] {s} p x:(MOVWstoreconst [a] {s} p mem))
// cond: p.Op != OpSB && x.Uses == 1 && a.Off() + 4 == c.Off() && clobber(x)
- // result: (MOVDstore [a.Off32()] {s} p (MOVDconst [c.Val()&0xffffffff | a.Val()<<32]) mem)
+ // result: (MOVDstore [a.Off()] {s} p (MOVDconst [c.Val64()&0xffffffff | a.Val64()<<32]) mem)
for {
c := auxIntToValAndOff(v.AuxInt)
s := auxToSym(v.Aux)
break
}
v.reset(OpS390XMOVDstore)
- v.AuxInt = int32ToAuxInt(a.Off32())
+ v.AuxInt = int32ToAuxInt(a.Off())
v.Aux = symToAux(s)
v0 := b.NewValue0(x.Pos, OpS390XMOVDconst, typ.UInt64)
- v0.AuxInt = int64ToAuxInt(c.Val()&0xffffffff | a.Val()<<32)
+ v0.AuxInt = int64ToAuxInt(c.Val64()&0xffffffff | a.Val64()<<32)
v.AddArg3(p, v0, mem)
return true
}
return true
}
// match: (Zero [3] destptr mem)
- // result: (MOVBstoreconst [makeValAndOff32(0,2)] destptr (MOVHstoreconst [0] destptr mem))
+ // result: (MOVBstoreconst [makeValAndOff(0,2)] destptr (MOVHstoreconst [0] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 3 {
break
destptr := v_0
mem := v_1
v.reset(OpS390XMOVBstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 2))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 2))
v0 := b.NewValue0(v.Pos, OpS390XMOVHstoreconst, types.TypeMem)
v0.AuxInt = valAndOffToAuxInt(0)
v0.AddArg2(destptr, mem)
return true
}
// match: (Zero [5] destptr mem)
- // result: (MOVBstoreconst [makeValAndOff32(0,4)] destptr (MOVWstoreconst [0] destptr mem))
+ // result: (MOVBstoreconst [makeValAndOff(0,4)] destptr (MOVWstoreconst [0] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 5 {
break
destptr := v_0
mem := v_1
v.reset(OpS390XMOVBstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 4))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 4))
v0 := b.NewValue0(v.Pos, OpS390XMOVWstoreconst, types.TypeMem)
v0.AuxInt = valAndOffToAuxInt(0)
v0.AddArg2(destptr, mem)
return true
}
// match: (Zero [6] destptr mem)
- // result: (MOVHstoreconst [makeValAndOff32(0,4)] destptr (MOVWstoreconst [0] destptr mem))
+ // result: (MOVHstoreconst [makeValAndOff(0,4)] destptr (MOVWstoreconst [0] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 6 {
break
destptr := v_0
mem := v_1
v.reset(OpS390XMOVHstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 4))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 4))
v0 := b.NewValue0(v.Pos, OpS390XMOVWstoreconst, types.TypeMem)
v0.AuxInt = valAndOffToAuxInt(0)
v0.AddArg2(destptr, mem)
return true
}
// match: (Zero [7] destptr mem)
- // result: (MOVWstoreconst [makeValAndOff32(0,3)] destptr (MOVWstoreconst [0] destptr mem))
+ // result: (MOVWstoreconst [makeValAndOff(0,3)] destptr (MOVWstoreconst [0] destptr mem))
for {
if auxIntToInt64(v.AuxInt) != 7 {
break
destptr := v_0
mem := v_1
v.reset(OpS390XMOVWstoreconst)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(0, 3))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(0, 3))
v0 := b.NewValue0(v.Pos, OpS390XMOVWstoreconst, types.TypeMem)
v0.AuxInt = valAndOffToAuxInt(0)
v0.AddArg2(destptr, mem)
}
// match: (Zero [s] destptr mem)
// cond: s > 0 && s <= 1024
- // result: (CLEAR [makeValAndOff32(int32(s), 0)] destptr mem)
+ // result: (CLEAR [makeValAndOff(int32(s), 0)] destptr mem)
for {
s := auxIntToInt64(v.AuxInt)
destptr := v_0
break
}
v.reset(OpS390XCLEAR)
- v.AuxInt = valAndOffToAuxInt(makeValAndOff32(int32(s), 0))
+ v.AuxInt = valAndOffToAuxInt(makeValAndOff(int32(s), 0))
v.AddArg2(destptr, mem)
return true
}
p := s.Prog(v.Op.Asm())
p.From.Type = obj.TYPE_MEM
p.From.Reg = v.Args[0].Reg()
- ssagen.AddAux2(&p.From, v, sc.Off())
+ ssagen.AddAux2(&p.From, v, sc.Off64())
p.To.Type = obj.TYPE_CONST
- p.To.Offset = sc.Val()
+ p.To.Offset = sc.Val64()
case ssa.Op386MOVLconst:
x := v.Reg()
} else {
p = s.Prog(x86.ADECL)
}
- off := sc.Off()
+ off := sc.Off64()
p.To.Type = obj.TYPE_MEM
p.To.Reg = v.Args[0].Reg()
ssagen.AddAux2(&p.To, v, off)
fallthrough
case ssa.Op386ANDLconstmodify, ssa.Op386ORLconstmodify, ssa.Op386XORLconstmodify:
sc := v.AuxValAndOff()
- off := sc.Off()
- val := sc.Val()
+ off := sc.Off64()
+ val := sc.Val64()
p := s.Prog(v.Op.Asm())
p.From.Type = obj.TYPE_CONST
p.From.Offset = val
p := s.Prog(v.Op.Asm())
p.From.Type = obj.TYPE_CONST
sc := v.AuxValAndOff()
- p.From.Offset = sc.Val()
+ p.From.Offset = sc.Val64()
p.To.Type = obj.TYPE_MEM
p.To.Reg = v.Args[0].Reg()
- ssagen.AddAux2(&p.To, v, sc.Off())
+ ssagen.AddAux2(&p.To, v, sc.Off64())
case ssa.Op386ADDLconstmodifyidx4:
sc := v.AuxValAndOff()
val := sc.Val()
} else {
p = s.Prog(x86.ADECL)
}
- off := sc.Off()
+ off := sc.Off64()
p.To.Type = obj.TYPE_MEM
p.To.Reg = v.Args[0].Reg()
p.To.Scale = 4
p := s.Prog(v.Op.Asm())
p.From.Type = obj.TYPE_CONST
sc := v.AuxValAndOff()
- p.From.Offset = sc.Val()
+ p.From.Offset = sc.Val64()
r := v.Args[0].Reg()
i := v.Args[1].Reg()
switch v.Op {
p.To.Type = obj.TYPE_MEM
p.To.Reg = r
p.To.Index = i
- ssagen.AddAux2(&p.To, v, sc.Off())
+ ssagen.AddAux2(&p.To, v, sc.Off64())
case ssa.Op386MOVWLSX, ssa.Op386MOVBLSX, ssa.Op386MOVWLZX, ssa.Op386MOVBLZX,
ssa.Op386CVTSL2SS, ssa.Op386CVTSL2SD,
ssa.Op386CVTTSS2SL, ssa.Op386CVTTSD2SL,