panic(fmt.Sprintf("bad int register width %d:%s", t.Size(), t))
}
}
- panic("bad register type")
}
// opregreg emits instructions for
}
var eqfJumps = [2][2]gc.FloatingEQNEJump{
- {{x86.AJNE, 1}, {x86.AJPS, 1}}, // next == b.Succs[0]
- {{x86.AJNE, 1}, {x86.AJPC, 0}}, // next == b.Succs[1]
+ {{Jump: x86.AJNE, Index: 1}, {Jump: x86.AJPS, Index: 1}}, // next == b.Succs[0]
+ {{Jump: x86.AJNE, Index: 1}, {Jump: x86.AJPC, Index: 0}}, // next == b.Succs[1]
}
var nefJumps = [2][2]gc.FloatingEQNEJump{
- {{x86.AJNE, 0}, {x86.AJPC, 1}}, // next == b.Succs[0]
- {{x86.AJNE, 0}, {x86.AJPS, 0}}, // next == b.Succs[1]
+ {{Jump: x86.AJNE, Index: 0}, {Jump: x86.AJPC, Index: 1}}, // next == b.Succs[0]
+ {{Jump: x86.AJNE, Index: 0}, {Jump: x86.AJPS, Index: 0}}, // next == b.Succs[1]
}
func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
if b.Succs[0] != next {
p := gc.Prog(obj.AJMP)
p.To.Type = obj.TYPE_BRANCH
- s.Branches = append(s.Branches, gc.Branch{p, b.Succs[0]})
+ s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0]})
}
case ssa.BlockDefer:
// defer returns in rax:
p.To.Reg = x86.REG_AX
p = gc.Prog(x86.AJNE)
p.To.Type = obj.TYPE_BRANCH
- s.Branches = append(s.Branches, gc.Branch{p, b.Succs[1]})
+ s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1]})
if b.Succs[0] != next {
p := gc.Prog(obj.AJMP)
p.To.Type = obj.TYPE_BRANCH
- s.Branches = append(s.Branches, gc.Branch{p, b.Succs[0]})
+ s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0]})
}
case ssa.BlockExit:
gc.Prog(obj.AUNDEF) // tell plive.go that we never reach here
p = gc.Prog(jmp.invasm)
likely *= -1
p.To.Type = obj.TYPE_BRANCH
- s.Branches = append(s.Branches, gc.Branch{p, b.Succs[1]})
+ s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1]})
case b.Succs[1]:
p = gc.Prog(jmp.asm)
p.To.Type = obj.TYPE_BRANCH
- s.Branches = append(s.Branches, gc.Branch{p, b.Succs[0]})
+ s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0]})
default:
p = gc.Prog(jmp.asm)
p.To.Type = obj.TYPE_BRANCH
- s.Branches = append(s.Branches, gc.Branch{p, b.Succs[0]})
+ s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0]})
q := gc.Prog(obj.AJMP)
q.To.Type = obj.TYPE_BRANCH
- s.Branches = append(s.Branches, gc.Branch{q, b.Succs[1]})
+ s.Branches = append(s.Branches, gc.Branch{P: q, B: b.Succs[1]})
}
// liblink reorders the instruction stream as it sees fit.
if b.Succs[0] != next {
p := gc.Prog(obj.AJMP)
p.To.Type = obj.TYPE_BRANCH
- s.Branches = append(s.Branches, gc.Branch{p, b.Succs[0]})
+ s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0]})
}
case ssa.BlockRet:
gc.Prog(obj.ARET)
case ssa.BlockARMLT:
p := gc.Prog(arm.ABLT)
p.To.Type = obj.TYPE_BRANCH
- s.Branches = append(s.Branches, gc.Branch{p, b.Succs[0]})
+ s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[0]})
p = gc.Prog(obj.AJMP)
p.To.Type = obj.TYPE_BRANCH
- s.Branches = append(s.Branches, gc.Branch{p, b.Succs[1]})
+ s.Branches = append(s.Branches, gc.Branch{P: p, B: b.Succs[1]})
}
}
ls := Linksym(sym)
ls.Name = fmt.Sprintf("gclocals·%x", md5.Sum(ls.P))
ls.Dupok = true
- sv := obj.SymVer{ls.Name, 0}
+ sv := obj.SymVer{Name: ls.Name, Version: 0}
ls2, ok := Ctxt.Hash[sv]
if ok {
sym.Lsym = ls2
s.stmtList(n.Ninit)
switch n.Op {
case OCFUNC:
- aux := s.lookupSymbol(n, &ssa.ExternSymbol{n.Type, n.Left.Sym})
+ aux := s.lookupSymbol(n, &ssa.ExternSymbol{Typ: n.Type, Sym: n.Left.Sym})
return s.entryNewValue1A(ssa.OpAddr, n.Type, aux, s.sb)
case OPARAM:
addr := s.addr(n, false)
if n.Class == PFUNC {
// "value" of a function is the address of the function's closure
sym := funcsym(n.Sym)
- aux := &ssa.ExternSymbol{n.Type, sym}
+ aux := &ssa.ExternSymbol{Typ: n.Type, Sym: sym}
return s.entryNewValue1A(ssa.OpAddr, Ptrto(n.Type), aux, s.sb)
}
if s.canSSA(n) {
// Call growslice
s.startBlock(grow)
- taddr := s.newValue1A(ssa.OpAddr, Types[TUINTPTR], &ssa.ExternSymbol{Types[TUINTPTR], typenamesym(n.Type.Elem())}, s.sb)
+ taddr := s.newValue1A(ssa.OpAddr, Types[TUINTPTR], &ssa.ExternSymbol{Typ: Types[TUINTPTR], Sym: typenamesym(n.Type.Elem())}, s.sb)
r := s.rtcall(growslice, true, []*Type{pt, Types[TINT], Types[TINT]}, taddr, p, l, c, nl)
switch n.Class {
case PEXTERN:
// global variable
- aux := s.lookupSymbol(n, &ssa.ExternSymbol{n.Type, n.Sym})
+ aux := s.lookupSymbol(n, &ssa.ExternSymbol{Typ: n.Type, Sym: n.Sym})
v := s.entryNewValue1A(ssa.OpAddr, t, aux, s.sb)
// TODO: Make OpAddr use AuxInt as well as Aux.
if n.Xoffset != 0 {
bElse := s.f.NewBlock(ssa.BlockPlain)
bEnd := s.f.NewBlock(ssa.BlockPlain)
- aux := &ssa.ExternSymbol{Types[TBOOL], syslook("writeBarrier").Sym}
+ aux := &ssa.ExternSymbol{Typ: Types[TBOOL], Sym: syslook("writeBarrier").Sym}
flagaddr := s.newValue1A(ssa.OpAddr, Ptrto(Types[TUINT32]), aux, s.sb)
// TODO: select the .enabled field. It is currently first, so not needed for now.
// Load word, test byte, avoiding partial register write from load byte.
b.AddEdgeTo(bElse)
s.startBlock(bThen)
- taddr := s.newValue1A(ssa.OpAddr, Types[TUINTPTR], &ssa.ExternSymbol{Types[TUINTPTR], typenamesym(t)}, s.sb)
+ taddr := s.newValue1A(ssa.OpAddr, Types[TUINTPTR], &ssa.ExternSymbol{Typ: Types[TUINTPTR], Sym: typenamesym(t)}, s.sb)
s.rtcall(typedmemmove, true, nil, taddr, left, right)
s.endBlock().AddEdgeTo(bEnd)
bElse := s.f.NewBlock(ssa.BlockPlain)
bEnd := s.f.NewBlock(ssa.BlockPlain)
- aux := &ssa.ExternSymbol{Types[TBOOL], syslook("writeBarrier").Sym}
+ aux := &ssa.ExternSymbol{Typ: Types[TBOOL], Sym: syslook("writeBarrier").Sym}
flagaddr := s.newValue1A(ssa.OpAddr, Ptrto(Types[TUINT32]), aux, s.sb)
// TODO: select the .enabled field. It is currently first, so not needed for now.
// Load word, test byte, avoiding partial register write from load byte.
if !commaok {
// on failure, panic by calling panicdottype
s.startBlock(bFail)
- taddr := s.newValue1A(ssa.OpAddr, byteptr, &ssa.ExternSymbol{byteptr, typenamesym(n.Left.Type)}, s.sb)
+ taddr := s.newValue1A(ssa.OpAddr, byteptr, &ssa.ExternSymbol{Typ: byteptr, Sym: typenamesym(n.Left.Type)}, s.sb)
s.rtcall(panicdottype, false, nil, typ, target, taddr)
// on success, return idata field
if f.StaticData != nil {
for _, n := range f.StaticData.([]*Node) {
if !gen_as_init(n, false) {
- Fatalf("non-static data marked as static: %v\n\n", n, f)
+ Fatalf("non-static data marked as static: %v\n\n", n)
}
}
}
}
func AddAux2(a *obj.Addr, v *ssa.Value, offset int64) {
if a.Type != obj.TYPE_MEM {
- v.Fatalf("bad AddAux addr %s", a)
+ v.Fatalf("bad AddAux addr %v", a)
}
// add integer offset
a.Offset += offset
// Split this string up into two separate variables.
p := e.namedAuto(n.Sym.Name+".ptr", ptrType)
l := e.namedAuto(n.Sym.Name+".len", lenType)
- return ssa.LocalSlot{p, ptrType, 0}, ssa.LocalSlot{l, lenType, 0}
+ return ssa.LocalSlot{N: p, Type: ptrType, Off: 0}, ssa.LocalSlot{N: l, Type: lenType, Off: 0}
}
// Return the two parts of the larger variable.
- return ssa.LocalSlot{n, ptrType, name.Off}, ssa.LocalSlot{n, lenType, name.Off + int64(Widthptr)}
+ return ssa.LocalSlot{N: n, Type: ptrType, Off: name.Off}, ssa.LocalSlot{N: n, Type: lenType, Off: name.Off + int64(Widthptr)}
}
func (e *ssaExport) SplitInterface(name ssa.LocalSlot) (ssa.LocalSlot, ssa.LocalSlot) {
}
c := e.namedAuto(n.Sym.Name+f, t)
d := e.namedAuto(n.Sym.Name+".data", t)
- return ssa.LocalSlot{c, t, 0}, ssa.LocalSlot{d, t, 0}
+ return ssa.LocalSlot{N: c, Type: t, Off: 0}, ssa.LocalSlot{N: d, Type: t, Off: 0}
}
// Return the two parts of the larger variable.
- return ssa.LocalSlot{n, t, name.Off}, ssa.LocalSlot{n, t, name.Off + int64(Widthptr)}
+ return ssa.LocalSlot{N: n, Type: t, Off: name.Off}, ssa.LocalSlot{N: n, Type: t, Off: name.Off + int64(Widthptr)}
}
func (e *ssaExport) SplitSlice(name ssa.LocalSlot) (ssa.LocalSlot, ssa.LocalSlot, ssa.LocalSlot) {
p := e.namedAuto(n.Sym.Name+".ptr", ptrType)
l := e.namedAuto(n.Sym.Name+".len", lenType)
c := e.namedAuto(n.Sym.Name+".cap", lenType)
- return ssa.LocalSlot{p, ptrType, 0}, ssa.LocalSlot{l, lenType, 0}, ssa.LocalSlot{c, lenType, 0}
+ return ssa.LocalSlot{N: p, Type: ptrType, Off: 0}, ssa.LocalSlot{N: l, Type: lenType, Off: 0}, ssa.LocalSlot{N: c, Type: lenType, Off: 0}
}
// Return the three parts of the larger variable.
- return ssa.LocalSlot{n, ptrType, name.Off},
- ssa.LocalSlot{n, lenType, name.Off + int64(Widthptr)},
- ssa.LocalSlot{n, lenType, name.Off + int64(2*Widthptr)}
+ return ssa.LocalSlot{N: n, Type: ptrType, Off: name.Off},
+ ssa.LocalSlot{N: n, Type: lenType, Off: name.Off + int64(Widthptr)},
+ ssa.LocalSlot{N: n, Type: lenType, Off: name.Off + int64(2*Widthptr)}
}
func (e *ssaExport) SplitComplex(name ssa.LocalSlot) (ssa.LocalSlot, ssa.LocalSlot) {
// Split this complex up into two separate variables.
c := e.namedAuto(n.Sym.Name+".real", t)
d := e.namedAuto(n.Sym.Name+".imag", t)
- return ssa.LocalSlot{c, t, 0}, ssa.LocalSlot{d, t, 0}
+ return ssa.LocalSlot{N: c, Type: t, Off: 0}, ssa.LocalSlot{N: d, Type: t, Off: 0}
}
// Return the two parts of the larger variable.
- return ssa.LocalSlot{n, t, name.Off}, ssa.LocalSlot{n, t, name.Off + s}
+ return ssa.LocalSlot{N: n, Type: t, Off: name.Off}, ssa.LocalSlot{N: n, Type: t, Off: name.Off + s}
}
func (e *ssaExport) SplitStruct(name ssa.LocalSlot, i int) ssa.LocalSlot {
// have no fear, identically-named but distinct Autos are
// ok, albeit maybe confusing for a debugger.
x := e.namedAuto(n.Sym.Name+"."+st.FieldName(i), ft)
- return ssa.LocalSlot{x, ft, 0}
+ return ssa.LocalSlot{N: x, Type: ft, Off: 0}
}
- return ssa.LocalSlot{n, ft, name.Off + st.FieldOff(i)}
+ return ssa.LocalSlot{N: n, Type: ft, Off: name.Off + st.FieldOff(i)}
}
// namedAuto returns a new AUTO variable with the given name and type.
f.Fatalf("ret block %s has successors", b)
}
if b.Control == nil {
- f.Fatalf("ret block %s has nil control %s", b)
+ f.Fatalf("ret block %s has nil control", b)
}
if !b.Control.Type.IsMemory() {
f.Fatalf("ret block %s has non-memory control value %s", b, b.Control.LongString())
f.Fatalf("retjmp block %s len(Succs)==%d, want 0", b, len(b.Succs))
}
if b.Control == nil {
- f.Fatalf("retjmp block %s has nil control %s", b)
+ f.Fatalf("retjmp block %s has nil control", b)
}
if !b.Control.Type.IsMemory() {
f.Fatalf("retjmp block %s has non-memory control value %s", b, b.Control.LongString())
}
}
if len(b.Succs) > 2 && b.Likely != BranchUnknown {
- f.Fatalf("likeliness prediction %d for block %s with %d successors: %s", b.Likely, b, len(b.Succs))
+ f.Fatalf("likeliness prediction %d for block %s with %d successors", b.Likely, b, len(b.Succs))
}
for _, v := range b.Values {
(ADDQconst [c] (ADDQconst [d] x)) && is32Bit(c+d) -> (ADDQconst [c+d] x)
(ADDLconst [c] (ADDLconst [d] x)) -> (ADDLconst [int64(int32(c+d))] x)
(SUBQconst (MOVQconst [d]) [c]) -> (MOVQconst [d-c])
-(SUBLconst (MOVLconst [d]) [c]) -> (MOVLconst [int64(int32(d-c))])
(SUBQconst (SUBQconst x [d]) [c]) && is32Bit(-c-d) -> (ADDQconst [-c-d] x)
-(SUBLconst (SUBLconst x [d]) [c]) -> (ADDLconst [int64(int32(-c-d))] x)
(SARQconst [c] (MOVQconst [d])) -> (MOVQconst [d>>uint64(c)])
(SARLconst [c] (MOVQconst [d])) -> (MOVQconst [d>>uint64(c)])
(SARWconst [c] (MOVQconst [d])) -> (MOVQconst [d>>uint64(c)])
fmt.Fprintf(w, "func rewriteValue%s_%s(v *Value, config *Config) bool {\n", arch.name, opName(op, arch))
fmt.Fprintln(w, "b := v.Block")
fmt.Fprintln(w, "_ = b")
- for _, rule := range oprules[op] {
+ var canFail bool
+ for i, rule := range oprules[op] {
match, cond, result := rule.parse()
fmt.Fprintf(w, "// match: %s\n", match)
fmt.Fprintf(w, "// cond: %s\n", cond)
fmt.Fprintf(w, "// result: %s\n", result)
+ canFail = false
fmt.Fprintf(w, "for {\n")
- genMatch(w, arch, match, rule.loc)
+ if genMatch(w, arch, match, rule.loc) {
+ canFail = true
+ }
if cond != "" {
fmt.Fprintf(w, "if !(%s) {\nbreak\n}\n", cond)
+ canFail = true
+ }
+ if !canFail && i != len(oprules[op])-1 {
+ log.Fatalf("unconditional rule %s is followed by other rules", match)
}
genResult(w, arch, result, rule.loc)
fmt.Fprintf(w, "}\n")
}
- fmt.Fprintf(w, "return false\n")
+ if canFail {
+ fmt.Fprintf(w, "return false\n")
+ }
fmt.Fprintf(w, "}\n")
}
}
}
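For illustration, a minimal sketch of what the generator now emits for an op whose only rule is unconditional (assuming the usual (Const16 [val]) -> (MOVLconst [val]) lowering, as the removed "return false" lines below suggest): because neither genMatch nor a cond check prints a break, the match loop always returns, so the trailing return false would be unreachable and is no longer printed.

func rewriteValueAMD64_OpConst16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Const16 [val])
	// cond:
	// result: (MOVLconst [val])
	for {
		val := v.AuxInt
		v.reset(OpAMD64MOVLconst)
		v.AuxInt = val
		return true
	}
	// no trailing "return false": with no break emitted, the for loop is a
	// terminating statement, so the old return was unreachable.
}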
-func genMatch(w io.Writer, arch arch, match string, loc string) {
- genMatch0(w, arch, match, "v", map[string]struct{}{}, true, loc)
+// genMatch returns true if the match can fail.
+func genMatch(w io.Writer, arch arch, match string, loc string) bool {
+ return genMatch0(w, arch, match, "v", map[string]struct{}{}, true, loc)
}
-func genMatch0(w io.Writer, arch arch, match, v string, m map[string]struct{}, top bool, loc string) {
+func genMatch0(w io.Writer, arch arch, match, v string, m map[string]struct{}, top bool, loc string) bool {
if match[0] != '(' || match[len(match)-1] != ')' {
panic("non-compound expr in genMatch0: " + match)
}
+ canFail := false
// split body up into regions. Split by spaces/tabs, except those
// contained in () or {}.
// check op
if !top {
fmt.Fprintf(w, "if %s.Op != %s {\nbreak\n}\n", v, opName(s[0], arch))
+ canFail = true
}
// check type/aux/args
if !isVariable(t) {
// code. We must match the results of this code.
fmt.Fprintf(w, "if %s.Type != %s {\nbreak\n}\n", v, t)
+ canFail = true
} else {
// variable
if _, ok := m[t]; ok {
// must match previous variable
fmt.Fprintf(w, "if %s.Type != %s {\nbreak\n}\n", v, t)
+ canFail = true
} else {
m[t] = struct{}{}
fmt.Fprintf(w, "%s := %s.Type\n", t, v)
if !isVariable(x) {
// code
fmt.Fprintf(w, "if %s.AuxInt != %s {\nbreak\n}\n", v, x)
+ canFail = true
} else {
// variable
if _, ok := m[x]; ok {
fmt.Fprintf(w, "if %s.AuxInt != %s {\nbreak\n}\n", v, x)
+ canFail = true
} else {
m[x] = struct{}{}
fmt.Fprintf(w, "%s := %s.AuxInt\n", x, v)
if !isVariable(x) {
// code
fmt.Fprintf(w, "if %s.Aux != %s {\nbreak\n}\n", v, x)
+ canFail = true
} else {
// variable
if _, ok := m[x]; ok {
fmt.Fprintf(w, "if %s.Aux != %s {\nbreak\n}\n", v, x)
+ canFail = true
} else {
m[x] = struct{}{}
fmt.Fprintf(w, "%s := %s.Aux\n", x, v)
// For example, (add x x). Equality is just pointer equality
// on Values (so cse is important to do before lowering).
fmt.Fprintf(w, "if %s != %s.Args[%d] {\nbreak\n}\n", a, v, argnum)
+ canFail = true
} else {
// remember that this variable references the given value
m[a] = struct{}{}
argname = fmt.Sprintf("%s_%d", v, argnum)
}
fmt.Fprintf(w, "%s := %s.Args[%d]\n", argname, v, argnum)
- genMatch0(w, arch, a, argname, m, false, loc)
+ if genMatch0(w, arch, a, argname, m, false, loc) {
+ canFail = true
+ }
argnum++
}
}
if op.argLength == -1 {
fmt.Fprintf(w, "if len(%s.Args) != %d {\nbreak\n}\n", v, argnum)
+ canFail = true
} else if int(op.argLength) != argnum {
log.Fatalf("%s: op %s should have %d args, has %d", loc, op.name, op.argLength, argnum)
}
+ return canFail
}
func genResult(w io.Writer, arch arch, result string, loc string) {
return x
}
panic(fmt.Sprintf("mergeSym with two non-nil syms %s %s", x, y))
- return nil
}
func canMergeSym(x, y interface{}) bool {
return x == nil || y == nil
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAdd32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAdd32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAdd64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAdd64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAdd8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAddPtr(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAddr(v *Value, config *Config) bool {
b := v.Block
v.AddArg(base)
return true
}
- return false
}
func rewriteValueAMD64_OpAnd16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAnd32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAnd64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAnd8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAndB(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAvg64u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpBswap32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpBswap64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpAMD64CMOVLEQconst(v *Value, config *Config) bool {
b := v.Block
v.AddArg(mem)
return true
}
- return false
}
func rewriteValueAMD64_OpCom16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpCom32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpCom64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpCom8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpConst16(v *Value, config *Config) bool {
b := v.Block
v.AuxInt = val
return true
}
- return false
}
func rewriteValueAMD64_OpConst32(v *Value, config *Config) bool {
b := v.Block
v.AuxInt = val
return true
}
- return false
}
func rewriteValueAMD64_OpConst32F(v *Value, config *Config) bool {
b := v.Block
v.AuxInt = val
return true
}
- return false
}
func rewriteValueAMD64_OpConst64(v *Value, config *Config) bool {
b := v.Block
v.AuxInt = val
return true
}
- return false
}
func rewriteValueAMD64_OpConst64F(v *Value, config *Config) bool {
b := v.Block
v.AuxInt = val
return true
}
- return false
}
func rewriteValueAMD64_OpConst8(v *Value, config *Config) bool {
b := v.Block
v.AuxInt = val
return true
}
- return false
}
func rewriteValueAMD64_OpConstBool(v *Value, config *Config) bool {
b := v.Block
v.AuxInt = b
return true
}
- return false
}
func rewriteValueAMD64_OpConstNil(v *Value, config *Config) bool {
b := v.Block
v.AuxInt = 0
return true
}
- return false
}
func rewriteValueAMD64_OpConvert(v *Value, config *Config) bool {
b := v.Block
v.AddArg(mem)
return true
}
- return false
}
func rewriteValueAMD64_OpCtz16(v *Value, config *Config) bool {
b := v.Block
v.AuxInt = 16
return true
}
- return false
}
func rewriteValueAMD64_OpCtz32(v *Value, config *Config) bool {
b := v.Block
v.AuxInt = 32
return true
}
- return false
}
func rewriteValueAMD64_OpCtz64(v *Value, config *Config) bool {
b := v.Block
v.AuxInt = 64
return true
}
- return false
}
func rewriteValueAMD64_OpCvt32Fto32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpCvt32Fto64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpCvt32Fto64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpCvt32to32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpCvt32to64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpCvt64Fto32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpCvt64Fto32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpCvt64Fto64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpCvt64to32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpCvt64to64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpDeferCall(v *Value, config *Config) bool {
b := v.Block
v.AddArg(mem)
return true
}
- return false
}
func rewriteValueAMD64_OpDiv16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpDiv16u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpDiv32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpDiv32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpDiv32u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpDiv64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpDiv64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpDiv64u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpDiv8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpDiv8u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpEq16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpEq32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpEq32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpEq64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpEq64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpEq8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpEqB(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpEqPtr(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGeq16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGeq16U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGeq32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGeq32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGeq32U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGeq64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGeq64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGeq64U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGeq8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGeq8U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGetClosurePtr(v *Value, config *Config) bool {
b := v.Block
v.reset(OpAMD64LoweredGetClosurePtr)
return true
}
- return false
}
func rewriteValueAMD64_OpGetG(v *Value, config *Config) bool {
b := v.Block
v.AddArg(mem)
return true
}
- return false
}
func rewriteValueAMD64_OpGoCall(v *Value, config *Config) bool {
b := v.Block
v.AddArg(mem)
return true
}
- return false
}
func rewriteValueAMD64_OpGreater16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGreater16U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGreater32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGreater32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGreater32U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGreater64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGreater64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGreater64U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGreater8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpGreater8U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpHmul16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpHmul16u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpHmul32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpHmul32u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpHmul64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpHmul64u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpHmul8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpHmul8u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpITab(v *Value, config *Config) bool {
b := v.Block
v.AddArg(mem)
return true
}
- return false
}
func rewriteValueAMD64_OpIsInBounds(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpIsNonNil(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpIsSliceInBounds(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpAMD64LEAQ(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLeq16U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLeq32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLeq32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLeq32U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLeq64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLeq64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLeq64U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLeq8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLeq8U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLess16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLess16U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLess32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLess32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLess32U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLess64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLess64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLess64U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLess8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLess8U(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpLoad(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpLrot32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpLrot64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpLrot8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh16x16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh16x32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh16x64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh16x8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh32x16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh32x32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh32x64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh32x8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh64x16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh64x32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh64x64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh64x8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh8x16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh8x32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh8x64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpLsh8x8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpAMD64MOVBQSX(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpMod16u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpMod32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpMod32u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpMod64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpMod64u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpMod8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpMod8u(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpMove(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpMul32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpMul32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpMul64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpMul64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpMul8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpAMD64NEGL(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpNeg32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpNeg32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpNeg64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpNeg64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpNeg8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpNeq16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpNeq32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpNeq32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpNeq64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpNeq64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpNeq8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpNeqB(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpNeqPtr(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpNilCheck(v *Value, config *Config) bool {
b := v.Block
v.AddArg(mem)
return true
}
- return false
}
func rewriteValueAMD64_OpNot(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpAMD64ORL(v *Value, config *Config) bool {
b := v.Block
v.AddArg(ptr)
return true
}
- return false
}
func rewriteValueAMD64_OpOr16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpOr32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpOr64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpOr8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpOrB(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh16Ux16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh16Ux32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh16Ux64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh16Ux8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh16x16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh16x32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh16x64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh16x8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh32Ux16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh32Ux32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh32Ux64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh32Ux8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh32x16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh32x32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh32x64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh32x8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh64Ux16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh64Ux32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh64Ux64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh64Ux8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh64x16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh64x32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh64x64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh64x8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh8Ux16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh8Ux32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh8Ux64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh8Ux8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh8x16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh8x32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh8x64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpRsh8x8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueAMD64_OpAMD64SARB(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- // match: (SUBLconst (MOVLconst [d]) [c])
- // cond:
- // result: (MOVLconst [int64(int32(d-c))])
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpAMD64MOVLconst {
- break
- }
- d := v_0.AuxInt
- c := v.AuxInt
- v.reset(OpAMD64MOVLconst)
- v.AuxInt = int64(int32(d - c))
- return true
- }
- // match: (SUBLconst (SUBLconst x [d]) [c])
- // cond:
- // result: (ADDLconst [int64(int32(-c-d))] x)
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpAMD64SUBLconst {
- break
- }
- x := v_0.Args[0]
- d := v_0.AuxInt
- c := v.AuxInt
- v.reset(OpAMD64ADDLconst)
- v.AuxInt = int64(int32(-c - d))
- v.AddArg(x)
- return true
- }
- return false
}
func rewriteValueAMD64_OpAMD64SUBQ(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpSignExt16to64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpSignExt32to64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpSignExt8to16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpSignExt8to32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpSignExt8to64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpSqrt(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpStaticCall(v *Value, config *Config) bool {
b := v.Block
v.AddArg(mem)
return true
}
- return false
}
func rewriteValueAMD64_OpStore(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpSub32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpSub32F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpSub64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpSub64F(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpSub8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpSubPtr(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpTrunc16to8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpTrunc32to16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpTrunc32to8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpTrunc64to16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpTrunc64to32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpTrunc64to8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpAMD64XORL(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpXor32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpXor64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpXor8(v *Value, config *Config) bool {
b := v.Block
v.AddArg(y)
return true
}
- return false
}
func rewriteValueAMD64_OpZero(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpZeroExt16to64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpZeroExt32to64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpZeroExt8to16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpZeroExt8to32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteValueAMD64_OpZeroExt8to64(v *Value, config *Config) bool {
b := v.Block
v.AddArg(x)
return true
}
- return false
}
func rewriteBlockAMD64(b *Block) bool {
switch b.Kind {
v.AddArg(y)
return true
}
- return false
}
func rewriteValueARM_OpAddr(v *Value, config *Config) bool {
b := v.Block
v.AddArg(base)
return true
}
- return false
}
func rewriteValueARM_OpConst32(v *Value, config *Config) bool {
b := v.Block
v.AuxInt = val
return true
}
- return false
}
func rewriteValueARM_OpLess32(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v0)
return true
}
- return false
}
func rewriteValueARM_OpLoad(v *Value, config *Config) bool {
b := v.Block
v.AddArg(ptr)
return true
}
- return false
}
func rewriteValueARM_OpStaticCall(v *Value, config *Config) bool {
b := v.Block
v.AddArg(mem)
return true
}
- return false
}
func rewriteValueARM_OpStore(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValuegeneric_OpConstSlice(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValuegeneric_OpEqPtr(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValuegeneric_OpGeq16(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValuegeneric_OpNeqPtr(v *Value, config *Config) bool {
b := v.Block
v.AddArg(v1)
return true
}
- return false
}
func rewriteValuegeneric_OpOffPtr(v *Value, config *Config) bool {
b := v.Block