(CMP(Q|L|W|B)load {sym} [off] ptr x mem) => (CMP(Q|L|W|B) (MOV(Q|L|W|B)load {sym} [off] ptr mem) x)
-(CMPQconstload {sym} [vo] ptr mem) => (CMPQconst (MOVQload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
-(CMPLconstload {sym} [vo] ptr mem) => (CMPLconst (MOVLload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
-(CMPWconstload {sym} [vo] ptr mem) => (CMPWconst (MOVWload {sym} [vo.Off32()] ptr mem) [vo.Val16()])
-(CMPBconstload {sym} [vo] ptr mem) => (CMPBconst (MOVBload {sym} [vo.Off32()] ptr mem) [vo.Val8()])
+(CMP(Q|L|W|B)constload {sym} [vo] ptr mem) && vo.Val() == 0 => (TEST(Q|L|W|B) x:(MOV(Q|L|W|B)load {sym} [vo.Off32()] ptr mem) x)
+
+(CMPQconstload {sym} [vo] ptr mem) && vo.Val() != 0 => (CMPQconst (MOVQload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
+(CMPLconstload {sym} [vo] ptr mem) && vo.Val() != 0 => (CMPLconst (MOVLload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
+(CMPWconstload {sym} [vo] ptr mem) && vo.Val() != 0 => (CMPWconst (MOVWload {sym} [vo.Off32()] ptr mem) [vo.Val16()])
+(CMPBconstload {sym} [vo] ptr mem) && vo.Val() != 0 => (CMPBconst (MOVBload {sym} [vo.Off32()] ptr mem) [vo.Val8()])
(CMP(Q|L|W|B)loadidx1 {sym} [off] ptr idx x mem) => (CMP(Q|L|W|B) (MOV(Q|L|W|B)loadidx1 {sym} [off] ptr idx mem) x)
(CMPQloadidx8 {sym} [off] ptr idx x mem) => (CMPQ (MOVQloadidx8 {sym} [off] ptr idx mem) x)
(CMPLloadidx4 {sym} [off] ptr idx x mem) => (CMPL (MOVLloadidx4 {sym} [off] ptr idx mem) x)
(CMPWloadidx2 {sym} [off] ptr idx x mem) => (CMPW (MOVWloadidx2 {sym} [off] ptr idx mem) x)
-(CMPQconstloadidx1 {sym} [vo] ptr idx mem) => (CMPQconst (MOVQloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
-(CMPLconstloadidx1 {sym} [vo] ptr idx mem) => (CMPLconst (MOVLloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
-(CMPWconstloadidx1 {sym} [vo] ptr idx mem) => (CMPWconst (MOVWloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
-(CMPBconstloadidx1 {sym} [vo] ptr idx mem) => (CMPBconst (MOVBloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val8()])
+(CMP(Q|L|W|B)constloadidx1 {sym} [vo] ptr idx mem) && vo.Val() == 0 => (TEST(Q|L|W|B) x:(MOV(Q|L|W|B)loadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+(CMPQconstloadidx8 {sym} [vo] ptr idx mem) && vo.Val() == 0 => (TESTQ x:(MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) x)
+(CMPLconstloadidx4 {sym} [vo] ptr idx mem) && vo.Val() == 0 => (TESTL x:(MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) x)
+(CMPWconstloadidx2 {sym} [vo] ptr idx mem) && vo.Val() == 0 => (TESTW x:(MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) x)
+
+(CMPQconstloadidx1 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPQconst (MOVQloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
+(CMPLconstloadidx1 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPLconst (MOVLloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
+(CMPWconstloadidx1 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPWconst (MOVWloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
+(CMPBconstloadidx1 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPBconst (MOVBloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val8()])
-(CMPQconstloadidx8 {sym} [vo] ptr idx mem) => (CMPQconst (MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
-(CMPLconstloadidx4 {sym} [vo] ptr idx mem) => (CMPLconst (MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
-(CMPWconstloadidx2 {sym} [vo] ptr idx mem) => (CMPWconst (MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
+(CMPQconstloadidx8 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPQconst (MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
+(CMPLconstloadidx4 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPLconst (MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
+(CMPWconstloadidx2 {sym} [vo] ptr idx mem) && vo.Val() != 0 => (CMPWconst (MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
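Taken together, the new vo.Val() == 0 rules turn a compare-with-zero of a just-loaded value into TEST x, x on the load result: testing a register against itself sets ZF and SF exactly as a compare with zero does, but needs no immediate byte in the encoding. The function below is an illustrative sketch of the kind of source these rules target; whether a given build actually forms the intermediate CMPBconstload op depends on the earlier load-merging passes, so this is a plausible shape, not guaranteed codegen.

package p

// firstByteIsZero has the shape these rules target: on AMD64 the load
// of b[0] and the == 0 comparison can be merged into a CMPBconstload
// with val == 0, which the rules above now split into TESTB x, x on a
// MOVBload rather than CMPBconst $0 (illustrative only).
func firstByteIsZero(b []byte) bool {
	return b[0] == 0
}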
--- a/src/cmd/compile/internal/ssa/rewriteAMD64splitload.go
+++ b/src/cmd/compile/internal/ssa/rewriteAMD64splitload.go
@@ ... @@ func rewriteValueAMD64splitload_OpAMD64CMPBconstload(v *Value) bool {
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPBconstload {sym} [vo] ptr mem)
+ // cond: vo.Val() == 0
+ // result: (TESTB x:(MOVBload {sym} [vo.Off32()] ptr mem) x)
+ for {
+ vo := auxIntToValAndOff(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ ptr := v_0
+ mem := v_1
+ if !(vo.Val() == 0) {
+ break
+ }
+ v.reset(OpAMD64TESTB)
+ x := b.NewValue0(v.Pos, OpAMD64MOVBload, typ.UInt8)
+ x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.Aux = symToAux(sym)
+ x.AddArg2(ptr, mem)
+ v.AddArg2(x, x)
+ return true
+ }
+ // match: (CMPBconstload {sym} [vo] ptr mem)
+ // cond: vo.Val() != 0
// result: (CMPBconst (MOVBload {sym} [vo.Off32()] ptr mem) [vo.Val8()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
mem := v_1
+ if !(vo.Val() != 0) {
+ break
+ }
v.reset(OpAMD64CMPBconst)
v.AuxInt = int8ToAuxInt(vo.Val8())
v0 := b.NewValue0(v.Pos, OpAMD64MOVBload, typ.UInt8)
v0.AuxInt = int32ToAuxInt(vo.Off32())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
return true
}
+ return false
}
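The vo.Off32() and vo.Val8() accessors above unpack a ValAndOff, the single auxiliary integer these const-load ops carry. A minimal sketch of that packing, assuming it mirrors the ssa package's encoding (constant value in the high 32 bits, address offset in the low 32):

package p

// ValAndOff packs a constant value and an address offset into one
// 64-bit AuxInt: value in the high 32 bits, offset in the low 32.
type ValAndOff int64

func (x ValAndOff) Val() int64   { return int64(x) >> 32 }        // constant value
func (x ValAndOff) Val32() int32 { return int32(int64(x) >> 32) }
func (x ValAndOff) Val16() int16 { return int16(int64(x) >> 32) }
func (x ValAndOff) Val8() int8   { return int8(int64(x) >> 32) }
func (x ValAndOff) Off() int64   { return int64(int32(x)) }       // sign-extended offset
func (x ValAndOff) Off32() int32 { return int32(x) }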
func rewriteValueAMD64splitload_OpAMD64CMPBconstloadidx1(v *Value) bool {
v_2 := v.Args[2]
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPBconstloadidx1 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() == 0
+ // result: (TESTB x:(MOVBloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+ for {
+ vo := auxIntToValAndOff(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ ptr := v_0
+ idx := v_1
+ mem := v_2
+ if !(vo.Val() == 0) {
+ break
+ }
+ v.reset(OpAMD64TESTB)
+ x := b.NewValue0(v.Pos, OpAMD64MOVBloadidx1, typ.UInt8)
+ x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.Aux = symToAux(sym)
+ x.AddArg3(ptr, idx, mem)
+ v.AddArg2(x, x)
+ return true
+ }
+ // match: (CMPBconstloadidx1 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() != 0
// result: (CMPBconst (MOVBloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val8()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
idx := v_1
mem := v_2
+ if !(vo.Val() != 0) {
+ break
+ }
v.reset(OpAMD64CMPBconst)
v.AuxInt = int8ToAuxInt(vo.Val8())
v0 := b.NewValue0(v.Pos, OpAMD64MOVBloadidx1, typ.UInt8)
v0.AuxInt = int32ToAuxInt(vo.Off32())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
return true
}
+ return false
}
func rewriteValueAMD64splitload_OpAMD64CMPBload(v *Value) bool {
v_2 := v.Args[2]
@@ ... @@ func rewriteValueAMD64splitload_OpAMD64CMPLconstload(v *Value) bool {
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPLconstload {sym} [vo] ptr mem)
+ // cond: vo.Val() == 0
+ // result: (TESTL x:(MOVLload {sym} [vo.Off32()] ptr mem) x)
+ for {
+ vo := auxIntToValAndOff(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ ptr := v_0
+ mem := v_1
+ if !(vo.Val() == 0) {
+ break
+ }
+ v.reset(OpAMD64TESTL)
+ x := b.NewValue0(v.Pos, OpAMD64MOVLload, typ.UInt32)
+ x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.Aux = symToAux(sym)
+ x.AddArg2(ptr, mem)
+ v.AddArg2(x, x)
+ return true
+ }
+ // match: (CMPLconstload {sym} [vo] ptr mem)
+ // cond: vo.Val() != 0
// result: (CMPLconst (MOVLload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
mem := v_1
+ if !(vo.Val() != 0) {
+ break
+ }
v.reset(OpAMD64CMPLconst)
v.AuxInt = int32ToAuxInt(vo.Val32())
v0 := b.NewValue0(v.Pos, OpAMD64MOVLload, typ.UInt32)
v0.AuxInt = int32ToAuxInt(vo.Off32())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
return true
}
+ return false
}
func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx1(v *Value) bool {
v_2 := v.Args[2]
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPLconstloadidx1 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() == 0
+ // result: (TESTL x:(MOVLloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+ for {
+ vo := auxIntToValAndOff(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ ptr := v_0
+ idx := v_1
+ mem := v_2
+ if !(vo.Val() == 0) {
+ break
+ }
+ v.reset(OpAMD64TESTL)
+ x := b.NewValue0(v.Pos, OpAMD64MOVLloadidx1, typ.UInt32)
+ x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.Aux = symToAux(sym)
+ x.AddArg3(ptr, idx, mem)
+ v.AddArg2(x, x)
+ return true
+ }
+ // match: (CMPLconstloadidx1 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() != 0
// result: (CMPLconst (MOVLloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
idx := v_1
mem := v_2
+ if !(vo.Val() != 0) {
+ break
+ }
v.reset(OpAMD64CMPLconst)
v.AuxInt = int32ToAuxInt(vo.Val32())
v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx1, typ.UInt32)
v0.AuxInt = int32ToAuxInt(vo.Off32())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
return true
}
+ return false
}
func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx4(v *Value) bool {
v_2 := v.Args[2]
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPLconstloadidx4 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() == 0
+ // result: (TESTL x:(MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) x)
+ for {
+ vo := auxIntToValAndOff(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ ptr := v_0
+ idx := v_1
+ mem := v_2
+ if !(vo.Val() == 0) {
+ break
+ }
+ v.reset(OpAMD64TESTL)
+ x := b.NewValue0(v.Pos, OpAMD64MOVLloadidx4, typ.UInt32)
+ x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.Aux = symToAux(sym)
+ x.AddArg3(ptr, idx, mem)
+ v.AddArg2(x, x)
+ return true
+ }
+ // match: (CMPLconstloadidx4 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() != 0
// result: (CMPLconst (MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
idx := v_1
mem := v_2
+ if !(vo.Val() != 0) {
+ break
+ }
v.reset(OpAMD64CMPLconst)
v.AuxInt = int32ToAuxInt(vo.Val32())
v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx4, typ.UInt32)
v0.AuxInt = int32ToAuxInt(vo.Off32())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
return true
}
+ return false
}
func rewriteValueAMD64splitload_OpAMD64CMPLload(v *Value) bool {
v_2 := v.Args[2]
@@ ... @@ func rewriteValueAMD64splitload_OpAMD64CMPQconstload(v *Value) bool {
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPQconstload {sym} [vo] ptr mem)
+ // cond: vo.Val() == 0
+ // result: (TESTQ x:(MOVQload {sym} [vo.Off32()] ptr mem) x)
+ for {
+ vo := auxIntToValAndOff(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ ptr := v_0
+ mem := v_1
+ if !(vo.Val() == 0) {
+ break
+ }
+ v.reset(OpAMD64TESTQ)
+ x := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
+ x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.Aux = symToAux(sym)
+ x.AddArg2(ptr, mem)
+ v.AddArg2(x, x)
+ return true
+ }
+ // match: (CMPQconstload {sym} [vo] ptr mem)
+ // cond: vo.Val() != 0
// result: (CMPQconst (MOVQload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
mem := v_1
+ if !(vo.Val() != 0) {
+ break
+ }
v.reset(OpAMD64CMPQconst)
v.AuxInt = int32ToAuxInt(vo.Val32())
v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
v0.AuxInt = int32ToAuxInt(vo.Off32())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
return true
}
+ return false
}
func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx1(v *Value) bool {
v_2 := v.Args[2]
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPQconstloadidx1 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() == 0
+ // result: (TESTQ x:(MOVQloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+ for {
+ vo := auxIntToValAndOff(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ ptr := v_0
+ idx := v_1
+ mem := v_2
+ if !(vo.Val() == 0) {
+ break
+ }
+ v.reset(OpAMD64TESTQ)
+ x := b.NewValue0(v.Pos, OpAMD64MOVQloadidx1, typ.UInt64)
+ x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.Aux = symToAux(sym)
+ x.AddArg3(ptr, idx, mem)
+ v.AddArg2(x, x)
+ return true
+ }
+ // match: (CMPQconstloadidx1 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() != 0
// result: (CMPQconst (MOVQloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
idx := v_1
mem := v_2
+ if !(vo.Val() != 0) {
+ break
+ }
v.reset(OpAMD64CMPQconst)
v.AuxInt = int32ToAuxInt(vo.Val32())
v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx1, typ.UInt64)
v0.AuxInt = int32ToAuxInt(vo.Off32())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
return true
}
+ return false
}
func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx8(v *Value) bool {
v_2 := v.Args[2]
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPQconstloadidx8 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() == 0
+ // result: (TESTQ x:(MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) x)
+ for {
+ vo := auxIntToValAndOff(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ ptr := v_0
+ idx := v_1
+ mem := v_2
+ if !(vo.Val() == 0) {
+ break
+ }
+ v.reset(OpAMD64TESTQ)
+ x := b.NewValue0(v.Pos, OpAMD64MOVQloadidx8, typ.UInt64)
+ x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.Aux = symToAux(sym)
+ x.AddArg3(ptr, idx, mem)
+ v.AddArg2(x, x)
+ return true
+ }
+ // match: (CMPQconstloadidx8 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() != 0
// result: (CMPQconst (MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
idx := v_1
mem := v_2
+ if !(vo.Val() != 0) {
+ break
+ }
v.reset(OpAMD64CMPQconst)
v.AuxInt = int32ToAuxInt(vo.Val32())
v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx8, typ.UInt64)
v0.AuxInt = int32ToAuxInt(vo.Off32())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
return true
}
+ return false
}
func rewriteValueAMD64splitload_OpAMD64CMPQload(v *Value) bool {
v_2 := v.Args[2]
@@ ... @@ func rewriteValueAMD64splitload_OpAMD64CMPWconstload(v *Value) bool {
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPWconstload {sym} [vo] ptr mem)
+ // cond: vo.Val() == 0
+ // result: (TESTW x:(MOVWload {sym} [vo.Off32()] ptr mem) x)
+ for {
+ vo := auxIntToValAndOff(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ ptr := v_0
+ mem := v_1
+ if !(vo.Val() == 0) {
+ break
+ }
+ v.reset(OpAMD64TESTW)
+ x := b.NewValue0(v.Pos, OpAMD64MOVWload, typ.UInt16)
+ x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.Aux = symToAux(sym)
+ x.AddArg2(ptr, mem)
+ v.AddArg2(x, x)
+ return true
+ }
+ // match: (CMPWconstload {sym} [vo] ptr mem)
+ // cond: vo.Val() != 0
// result: (CMPWconst (MOVWload {sym} [vo.Off32()] ptr mem) [vo.Val16()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
mem := v_1
+ if !(vo.Val() != 0) {
+ break
+ }
v.reset(OpAMD64CMPWconst)
v.AuxInt = int16ToAuxInt(vo.Val16())
v0 := b.NewValue0(v.Pos, OpAMD64MOVWload, typ.UInt16)
v0.AuxInt = int32ToAuxInt(vo.Off32())
v0.Aux = symToAux(sym)
v0.AddArg2(ptr, mem)
v.AddArg(v0)
return true
}
+ return false
}
func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx1(v *Value) bool {
v_2 := v.Args[2]
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPWconstloadidx1 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() == 0
+ // result: (TESTW x:(MOVWloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
+ for {
+ vo := auxIntToValAndOff(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ ptr := v_0
+ idx := v_1
+ mem := v_2
+ if !(vo.Val() == 0) {
+ break
+ }
+ v.reset(OpAMD64TESTW)
+ x := b.NewValue0(v.Pos, OpAMD64MOVWloadidx1, typ.UInt16)
+ x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.Aux = symToAux(sym)
+ x.AddArg3(ptr, idx, mem)
+ v.AddArg2(x, x)
+ return true
+ }
+ // match: (CMPWconstloadidx1 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() != 0
// result: (CMPWconst (MOVWloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
idx := v_1
mem := v_2
+ if !(vo.Val() != 0) {
+ break
+ }
v.reset(OpAMD64CMPWconst)
v.AuxInt = int16ToAuxInt(vo.Val16())
v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx1, typ.UInt16)
v0.AuxInt = int32ToAuxInt(vo.Off32())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
return true
}
+ return false
}
func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx2(v *Value) bool {
v_2 := v.Args[2]
b := v.Block
typ := &b.Func.Config.Types
// match: (CMPWconstloadidx2 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() == 0
+ // result: (TESTW x:(MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) x)
+ for {
+ vo := auxIntToValAndOff(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ ptr := v_0
+ idx := v_1
+ mem := v_2
+ if !(vo.Val() == 0) {
+ break
+ }
+ v.reset(OpAMD64TESTW)
+ x := b.NewValue0(v.Pos, OpAMD64MOVWloadidx2, typ.UInt16)
+ x.AuxInt = int32ToAuxInt(vo.Off32())
+ x.Aux = symToAux(sym)
+ x.AddArg3(ptr, idx, mem)
+ v.AddArg2(x, x)
+ return true
+ }
+ // match: (CMPWconstloadidx2 {sym} [vo] ptr idx mem)
+ // cond: vo.Val() != 0
// result: (CMPWconst (MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
for {
vo := auxIntToValAndOff(v.AuxInt)
sym := auxToSym(v.Aux)
ptr := v_0
idx := v_1
mem := v_2
+ if !(vo.Val() != 0) {
+ break
+ }
v.reset(OpAMD64CMPWconst)
v.AuxInt = int16ToAuxInt(vo.Val16())
v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx2, typ.UInt16)
v0.AuxInt = int32ToAuxInt(vo.Off32())
v0.Aux = symToAux(sym)
v0.AddArg3(ptr, idx, mem)
v.AddArg(v0)
return true
}
+ return false
}
func rewriteValueAMD64splitload_OpAMD64CMPWload(v *Value) bool {
v_2 := v.Args[2]