}
}
+// orPhi_ssa ORs x with a value produced by a phi whose two incoming
+// values are the same constant. Both branches deliberately assign the
+// identical constant (-1) so the SSA backend can fold the phi via the
+// (Phi (Const..) (Const..)) rewrite rules; marked //go:noinline so the
+// phi reaches the SSA backend instead of being folded away at the call
+// site by inlining.
+//go:noinline
+func orPhi_ssa(a bool, x int) int {
+ v := 0
+ if a {
+ v = -1
+ } else {
+ v = -1
+ }
+ return x | v
+}
+
+// testOrPhi checks orPhi_ssa for both branch directions. Since the phi
+// arguments are equal constants, the expected result is always -1.
+// On mismatch it reports the failure and records it in the package-level
+// failed flag so the harness exits non-zero (previously the failure was
+// only printed and the run still passed).
+func testOrPhi() {
+ if want, got := -1, orPhi_ssa(true, 4); got != want {
+ println("orPhi_ssa(true, 4)=", got, " want ", want)
+ failed = true
+ }
+ if want, got := -1, orPhi_ssa(false, 0); got != want {
+ println("orPhi_ssa(false, 0)=", got, " want ", want)
+ failed = true
+ }
+}
+
var failed = false
func main() {
(Sub8 (Add8 x y) x) -> y
(Sub8 (Add8 x y) y) -> x
+// Fold a phi of two identical constants into that constant. Phi takes a
+// variable number of arguments (one per predecessor block), so rulegen
+// must guard these patterns with an exact argument-count check before
+// the arguments are inspected.
+(Phi (Const8 [c]) (Const8 [d])) && int8(c) == int8(d) -> (Const8 [c])
+(Phi (Const16 [c]) (Const16 [d])) && int16(c) == int16(d) -> (Const16 [c])
+(Phi (Const32 [c]) (Const32 [d])) && int32(c) == int32(d) -> (Const32 [c])
+(Phi (Const64 [c]) (Const64 [c])) -> (Const64 [c])
+
// user nil checks
(NeqPtr p (ConstNil)) -> (IsNonNil p)
(NeqPtr (ConstNil) p) -> (IsNonNil p)
{name: "Sqrt"}, // sqrt(arg0), float64 only
// Data movement
- {name: "Phi"}, // select an argument based on which predecessor block we came from
- {name: "Copy"}, // output = arg0
+ {name: "Phi", variableLength: true}, // select an argument based on which predecessor block we came from
+ {name: "Copy"}, // output = arg0
// Convert converts between pointers and integers.
// We have a special op for this so as to not confuse GC
// (particularly stack maps). It takes a memory arg so it
typ string // default result type
aux string
rematerializeable bool
+ variableLength bool // if true the operation has a variable number of arguments
}
type blockData struct {
argnum++
}
}
+
+ variableLength := false
+ for _, op := range genericOps {
+ if op.name == s[0] {
+ variableLength = op.variableLength
+ break
+ }
+ }
+ for _, op := range arch.ops {
+ if op.name == s[0] {
+ variableLength = op.variableLength
+ break
+ }
+ }
+ if variableLength {
+ fmt.Fprintf(w, "if len(%s.Args) != %d {\nbreak\n}\n", v, argnum)
+ }
}
func genResult(w io.Writer, arch arch, result string) {
return rewriteValuegeneric_OpOr64(v, config)
case OpOr8:
return rewriteValuegeneric_OpOr8(v, config)
+ case OpPhi:
+ return rewriteValuegeneric_OpPhi(v, config)
case OpPtrIndex:
return rewriteValuegeneric_OpPtrIndex(v, config)
case OpRsh16Ux16:
}
return false
}
+// rewriteValuegeneric_OpPhi folds a Phi whose two arguments are the
+// same constant into that constant. Phi has a variable number of
+// arguments (one per predecessor of its block), so each match verifies
+// len(v.Args) == 2 BEFORE indexing into v.Args: the original generated
+// code checked the length only after reading v.Args[1], which panics
+// with an index-out-of-range on a single-predecessor phi.
+func rewriteValuegeneric_OpPhi(v *Value, config *Config) bool {
+ b := v.Block
+ _ = b
+ // match: (Phi (Const8 [c]) (Const8 [d]))
+ // cond: int8(c) == int8(d)
+ // result: (Const8 [c])
+ for {
+ if len(v.Args) != 2 {
+ break
+ }
+ if v.Args[0].Op != OpConst8 {
+ break
+ }
+ c := v.Args[0].AuxInt
+ if v.Args[1].Op != OpConst8 {
+ break
+ }
+ d := v.Args[1].AuxInt
+ if !(int8(c) == int8(d)) {
+ break
+ }
+ v.reset(OpConst8)
+ v.AuxInt = c
+ return true
+ }
+ // match: (Phi (Const16 [c]) (Const16 [d]))
+ // cond: int16(c) == int16(d)
+ // result: (Const16 [c])
+ for {
+ if len(v.Args) != 2 {
+ break
+ }
+ if v.Args[0].Op != OpConst16 {
+ break
+ }
+ c := v.Args[0].AuxInt
+ if v.Args[1].Op != OpConst16 {
+ break
+ }
+ d := v.Args[1].AuxInt
+ if !(int16(c) == int16(d)) {
+ break
+ }
+ v.reset(OpConst16)
+ v.AuxInt = c
+ return true
+ }
+ // match: (Phi (Const32 [c]) (Const32 [d]))
+ // cond: int32(c) == int32(d)
+ // result: (Const32 [c])
+ for {
+ if len(v.Args) != 2 {
+ break
+ }
+ if v.Args[0].Op != OpConst32 {
+ break
+ }
+ c := v.Args[0].AuxInt
+ if v.Args[1].Op != OpConst32 {
+ break
+ }
+ d := v.Args[1].AuxInt
+ if !(int32(c) == int32(d)) {
+ break
+ }
+ v.reset(OpConst32)
+ v.AuxInt = c
+ return true
+ }
+ // match: (Phi (Const64 [c]) (Const64 [c]))
+ // cond:
+ // result: (Const64 [c])
+ for {
+ if len(v.Args) != 2 {
+ break
+ }
+ if v.Args[0].Op != OpConst64 {
+ break
+ }
+ c := v.Args[0].AuxInt
+ if v.Args[1].Op != OpConst64 {
+ break
+ }
+ // The rule binds [c] in both positions, so the AuxInts must match.
+ if v.Args[1].AuxInt != v.Args[0].AuxInt {
+ break
+ }
+ v.reset(OpConst64)
+ v.AuxInt = c
+ return true
+ }
+ return false
+}
func rewriteValuegeneric_OpPtrIndex(v *Value, config *Config) bool {
b := v.Block
_ = b