(MOVLstoreconst [x] {sym} (ADDQ ptr idx) mem) -> (MOVLstoreconstidx1 [x] {sym} ptr idx mem)
(MOVQstoreconst [x] {sym} (ADDQ ptr idx) mem) -> (MOVQstoreconstidx1 [x] {sym} ptr idx mem)
+
+// combine SHLQ into indexed loads and stores
+(MOVWloadidx1 [c] {sym} ptr (SHLQconst [1] idx) mem) -> (MOVWloadidx2 [c] {sym} ptr idx mem)
+(MOVLloadidx1 [c] {sym} ptr (SHLQconst [2] idx) mem) -> (MOVLloadidx4 [c] {sym} ptr idx mem)
+(MOVQloadidx1 [c] {sym} ptr (SHLQconst [3] idx) mem) -> (MOVQloadidx8 [c] {sym} ptr idx mem)
+(MOVWstoreidx1 [c] {sym} ptr (SHLQconst [1] idx) val mem) -> (MOVWstoreidx2 [c] {sym} ptr idx val mem)
+(MOVLstoreidx1 [c] {sym} ptr (SHLQconst [2] idx) val mem) -> (MOVLstoreidx4 [c] {sym} ptr idx val mem)
+(MOVQstoreidx1 [c] {sym} ptr (SHLQconst [3] idx) val mem) -> (MOVQstoreidx8 [c] {sym} ptr idx val mem)
+(MOVWstoreconstidx1 [c] {sym} ptr (SHLQconst [1] idx) mem) -> (MOVWstoreconstidx2 [c] {sym} ptr idx mem)
+(MOVLstoreconstidx1 [c] {sym} ptr (SHLQconst [2] idx) mem) -> (MOVLstoreconstidx4 [c] {sym} ptr idx mem)
+(MOVQstoreconstidx1 [c] {sym} ptr (SHLQconst [3] idx) mem) -> (MOVQstoreconstidx8 [c] {sym} ptr idx mem)
+
// combine ADDQ into indexed loads and stores
(MOVBloadidx1 [c] {sym} (ADDQconst [d] ptr) idx mem) -> (MOVBloadidx1 [c+d] {sym} ptr idx mem)
(MOVWloadidx1 [c] {sym} (ADDQconst [d] ptr) idx mem) -> (MOVWloadidx1 [c+d] {sym} ptr idx mem)
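
Each new rule in the hunk above folds a constant left shift of the index into the instruction's scale factor, so an address computed as ptr + (idx << k) + c becomes a single scaled operand such as c(ptr)(idx*4) instead of a separate SHLQ feeding an unscaled indexed op. A minimal sketch of Go code that typically produces this pattern (function and variable names are illustrative, not from the CL):

package main

import "fmt"

// Indexing a []int32 computes ptr + i<<2. Existing rules like the ADDQ
// ones above first turn the load into (MOVLloadidx1 ptr (SHLQconst [2] i)),
// and the new rule then collapses it to (MOVLloadidx4 ptr i).
func sumInt32(s []int32) int32 {
	var t int32
	for i := range s {
		t += s[i]
	}
	return t
}

func main() { fmt.Println(sumInt32([]int32{1, 2, 3})) } // prints 6

The remaining hunks are the corresponding matchers in the generated rewriteAMD64.go; each rule above becomes one match block there.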
func rewriteValueAMD64_OpAMD64MOVLloadidx1(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (MOVLloadidx1 [c] {sym} ptr (SHLQconst [2] idx) mem)
+ // cond:
+ // result: (MOVLloadidx4 [c] {sym} ptr idx mem)
+ for {
+ c := v.AuxInt
+ sym := v.Aux
+ ptr := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpAMD64SHLQconst {
+ break
+ }
+ if v_1.AuxInt != 2 {
+ break
+ }
+ idx := v_1.Args[0]
+ mem := v.Args[2]
+ v.reset(OpAMD64MOVLloadidx4)
+ v.AuxInt = c
+ v.Aux = sym
+ v.AddArg(ptr)
+ v.AddArg(idx)
+ v.AddArg(mem)
+ return true
+ }
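
Every new matcher below follows this same generated template: read AuxInt and Aux, require Args[1] to be a SHLQconst whose shift amount matches the scale (1 for *2, 2 for *4, 3 for *8), then rebuild the value with the scaled opcode, leaving offset and symbol untouched. The new block is emitted ahead of the existing ADDQconst block in each function, though the ordering is not semantically significant: the rewriter reapplies rules until it reaches a fixpoint.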
// match: (MOVLloadidx1 [c] {sym} (ADDQconst [d] ptr) idx mem)
// cond:
// result: (MOVLloadidx1 [c+d] {sym} ptr idx mem)
func rewriteValueAMD64_OpAMD64MOVLstoreconstidx1(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (MOVLstoreconstidx1 [c] {sym} ptr (SHLQconst [2] idx) mem)
+ // cond:
+ // result: (MOVLstoreconstidx4 [c] {sym} ptr idx mem)
+ for {
+ c := v.AuxInt
+ sym := v.Aux
+ ptr := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpAMD64SHLQconst {
+ break
+ }
+ if v_1.AuxInt != 2 {
+ break
+ }
+ idx := v_1.Args[0]
+ mem := v.Args[2]
+ v.reset(OpAMD64MOVLstoreconstidx4)
+ v.AuxInt = c
+ v.Aux = sym
+ v.AddArg(ptr)
+ v.AddArg(idx)
+ v.AddArg(mem)
+ return true
+ }
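
One subtlety in the storeconst ops: their AuxInt is not a plain offset but a ValAndOff, which packs the constant being stored and the displacement into a single int64. That is why the existing ADDQconst rule below writes ValAndOff(x).add(c) rather than x+c, while the new SHLQconst rules can pass [c] through unchanged, since only the index expression is rewritten. A sketch of the packing, assuming the encoding used by cmd/compile/internal/ssa (value in the high 32 bits, offset in the low 32); this mirrors the real type but is not the compiler's code:

package main

import "fmt"

// ValAndOff packs the value to store (high 32 bits) and the address
// offset (low 32 bits) into one int64.
type ValAndOff int64

func makeValAndOff(val, off int64) ValAndOff { return ValAndOff(val<<32 | int64(uint32(off))) }
func (x ValAndOff) Val() int64               { return int64(x) >> 32 }
func (x ValAndOff) Off() int64               { return int64(int32(x)) }

func main() {
	x := makeValAndOff(7, -8)
	fmt.Println(x.Val(), x.Off()) // prints 7 -8
}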
// match: (MOVLstoreconstidx1 [x] {sym} (ADDQconst [c] ptr) idx mem)
// cond:
// result: (MOVLstoreconstidx1 [ValAndOff(x).add(c)] {sym} ptr idx mem)
func rewriteValueAMD64_OpAMD64MOVLstoreidx1(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (MOVLstoreidx1 [c] {sym} ptr (SHLQconst [2] idx) val mem)
+ // cond:
+ // result: (MOVLstoreidx4 [c] {sym} ptr idx val mem)
+ for {
+ c := v.AuxInt
+ sym := v.Aux
+ ptr := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpAMD64SHLQconst {
+ break
+ }
+ if v_1.AuxInt != 2 {
+ break
+ }
+ idx := v_1.Args[0]
+ val := v.Args[2]
+ mem := v.Args[3]
+ v.reset(OpAMD64MOVLstoreidx4)
+ v.AuxInt = c
+ v.Aux = sym
+ v.AddArg(ptr)
+ v.AddArg(idx)
+ v.AddArg(val)
+ v.AddArg(mem)
+ return true
+ }
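
The store variants differ from the loads only in the extra val argument: mem moves from v.Args[2] to v.Args[3], and val is re-added between idx and mem. The functions that follow repeat the same three shapes (load, storeconst, store) at the 8-byte width (MOVQ, shift 3) and the 2-byte width (MOVW, shift 1).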
// match: (MOVLstoreidx1 [c] {sym} (ADDQconst [d] ptr) idx val mem)
// cond:
// result: (MOVLstoreidx1 [c+d] {sym} ptr idx val mem)
func rewriteValueAMD64_OpAMD64MOVQloadidx1(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (MOVQloadidx1 [c] {sym} ptr (SHLQconst [3] idx) mem)
+ // cond:
+ // result: (MOVQloadidx8 [c] {sym} ptr idx mem)
+ for {
+ c := v.AuxInt
+ sym := v.Aux
+ ptr := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpAMD64SHLQconst {
+ break
+ }
+ if v_1.AuxInt != 3 {
+ break
+ }
+ idx := v_1.Args[0]
+ mem := v.Args[2]
+ v.reset(OpAMD64MOVQloadidx8)
+ v.AuxInt = c
+ v.Aux = sym
+ v.AddArg(ptr)
+ v.AddArg(idx)
+ v.AddArg(mem)
+ return true
+ }
// match: (MOVQloadidx1 [c] {sym} (ADDQconst [d] ptr) idx mem)
// cond:
// result: (MOVQloadidx1 [c+d] {sym} ptr idx mem)
func rewriteValueAMD64_OpAMD64MOVQstoreconstidx1(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (MOVQstoreconstidx1 [c] {sym} ptr (SHLQconst [3] idx) mem)
+ // cond:
+ // result: (MOVQstoreconstidx8 [c] {sym} ptr idx mem)
+ for {
+ c := v.AuxInt
+ sym := v.Aux
+ ptr := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpAMD64SHLQconst {
+ break
+ }
+ if v_1.AuxInt != 3 {
+ break
+ }
+ idx := v_1.Args[0]
+ mem := v.Args[2]
+ v.reset(OpAMD64MOVQstoreconstidx8)
+ v.AuxInt = c
+ v.Aux = sym
+ v.AddArg(ptr)
+ v.AddArg(idx)
+ v.AddArg(mem)
+ return true
+ }
// match: (MOVQstoreconstidx1 [x] {sym} (ADDQconst [c] ptr) idx mem)
// cond:
// result: (MOVQstoreconstidx1 [ValAndOff(x).add(c)] {sym} ptr idx mem)
func rewriteValueAMD64_OpAMD64MOVQstoreidx1(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (MOVQstoreidx1 [c] {sym} ptr (SHLQconst [3] idx) val mem)
+ // cond:
+ // result: (MOVQstoreidx8 [c] {sym} ptr idx val mem)
+ for {
+ c := v.AuxInt
+ sym := v.Aux
+ ptr := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpAMD64SHLQconst {
+ break
+ }
+ if v_1.AuxInt != 3 {
+ break
+ }
+ idx := v_1.Args[0]
+ val := v.Args[2]
+ mem := v.Args[3]
+ v.reset(OpAMD64MOVQstoreidx8)
+ v.AuxInt = c
+ v.Aux = sym
+ v.AddArg(ptr)
+ v.AddArg(idx)
+ v.AddArg(val)
+ v.AddArg(mem)
+ return true
+ }
// match: (MOVQstoreidx1 [c] {sym} (ADDQconst [d] ptr) idx val mem)
// cond:
// result: (MOVQstoreidx1 [c+d] {sym} ptr idx val mem)
func rewriteValueAMD64_OpAMD64MOVWloadidx1(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (MOVWloadidx1 [c] {sym} ptr (SHLQconst [1] idx) mem)
+ // cond:
+ // result: (MOVWloadidx2 [c] {sym} ptr idx mem)
+ for {
+ c := v.AuxInt
+ sym := v.Aux
+ ptr := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpAMD64SHLQconst {
+ break
+ }
+ if v_1.AuxInt != 1 {
+ break
+ }
+ idx := v_1.Args[0]
+ mem := v.Args[2]
+ v.reset(OpAMD64MOVWloadidx2)
+ v.AuxInt = c
+ v.Aux = sym
+ v.AddArg(ptr)
+ v.AddArg(idx)
+ v.AddArg(mem)
+ return true
+ }
// match: (MOVWloadidx1 [c] {sym} (ADDQconst [d] ptr) idx mem)
// cond:
// result: (MOVWloadidx1 [c+d] {sym} ptr idx mem)
func rewriteValueAMD64_OpAMD64MOVWstoreconstidx1(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (MOVWstoreconstidx1 [c] {sym} ptr (SHLQconst [1] idx) mem)
+ // cond:
+ // result: (MOVWstoreconstidx2 [c] {sym} ptr idx mem)
+ for {
+ c := v.AuxInt
+ sym := v.Aux
+ ptr := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpAMD64SHLQconst {
+ break
+ }
+ if v_1.AuxInt != 1 {
+ break
+ }
+ idx := v_1.Args[0]
+ mem := v.Args[2]
+ v.reset(OpAMD64MOVWstoreconstidx2)
+ v.AuxInt = c
+ v.Aux = sym
+ v.AddArg(ptr)
+ v.AddArg(idx)
+ v.AddArg(mem)
+ return true
+ }
// match: (MOVWstoreconstidx1 [x] {sym} (ADDQconst [c] ptr) idx mem)
// cond:
// result: (MOVWstoreconstidx1 [ValAndOff(x).add(c)] {sym} ptr idx mem)
func rewriteValueAMD64_OpAMD64MOVWstoreidx1(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (MOVWstoreidx1 [c] {sym} ptr (SHLQconst [1] idx) val mem)
+ // cond:
+ // result: (MOVWstoreidx2 [c] {sym} ptr idx val mem)
+ for {
+ c := v.AuxInt
+ sym := v.Aux
+ ptr := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpAMD64SHLQconst {
+ break
+ }
+ if v_1.AuxInt != 1 {
+ break
+ }
+ idx := v_1.Args[0]
+ val := v.Args[2]
+ mem := v.Args[3]
+ v.reset(OpAMD64MOVWstoreidx2)
+ v.AuxInt = c
+ v.Aux = sym
+ v.AddArg(ptr)
+ v.AddArg(idx)
+ v.AddArg(val)
+ v.AddArg(mem)
+ return true
+ }
// match: (MOVWstoreidx1 [c] {sym} (ADDQconst [d] ptr) idx val mem)
// cond:
// result: (MOVWstoreidx1 [c+d] {sym} ptr idx val mem)
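
To confirm the combine fires, compile a small indexing loop such as the sumInt32 sketch above with go tool compile -S and look for a scaled operand like (AX)(CX*4) on the load or store, with no separate SHLQ $2, CX feeding it; the register names here are illustrative. Setting GOSSAFUNC=sumInt32 before compiling also dumps the SSA after each pass, which shows the idx1 op being replaced by its scaled counterpart during lowering.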