cmd/compile: remove BTQconst rule
author Giovanni Bajo <rasky@develer.com>
Sat, 10 Mar 2018 10:17:05 +0000 (11:17 +0100)
committer Giovanni Bajo <rasky@develer.com>
Tue, 13 Mar 2018 23:21:38 +0000 (23:21 +0000)
This rule is meant as a code-size optimization, but it makes other rules
potentially more complex, as they need to cope with the fact that a
32-bit op (BTLconst) can appear anywhere a 64-bit rule matches.

Move the optimization to opcode expansion instead. Tests will be
added in a following CL.

Change-Id: Ica5ef291e7963c4af17c124d4a2869e6c8f7b0c7
Reviewed-on: https://go-review.googlesource.com/99995
Reviewed-by: Keith Randall <khr@golang.org>
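
For context, a sketch of the kind of Go source whose bit-test pattern the amd64 backend can turn into a BT instruction with an immediate operand (BTQconst/BTLconst in SSA form). The function names are illustrative only, and whether a BT instruction is actually selected depends on the matching rules in AMD64.rules for the compiler version in question.

package bittest

// bit40Set tests a fixed bit index above 31; this shape can lower to a
// BTQconst on amd64 (assuming the bit-test rules fire for it).
func bit40Set(x uint64) bool {
	return x&(1<<40) != 0
}

// bit5Set tests a fixed bit index below 32. If the backend ends up with a
// BTQconst carrying this small constant, the change in this CL makes the
// emitter pick the shorter 32-bit BTL encoding at codegen time.
func bit5Set(x uint64) bool {
	return x&(1<<5) != 0
}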
src/cmd/compile/internal/amd64/ssa.go
src/cmd/compile/internal/ssa/gen/AMD64.rules
src/cmd/compile/internal/ssa/rewriteAMD64.go

index fedc525463b727d934eda89c6b24db16988fd61e..6b8fe875a4211ffae600723282162f3c7211f345 100644 (file)
@@ -562,8 +562,18 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
                p.From.Reg = v.Args[0].Reg()
                p.To.Type = obj.TYPE_CONST
                p.To.Offset = v.AuxInt
-       case ssa.OpAMD64TESTQconst, ssa.OpAMD64TESTLconst, ssa.OpAMD64TESTWconst, ssa.OpAMD64TESTBconst,
-               ssa.OpAMD64BTLconst, ssa.OpAMD64BTQconst:
+       case ssa.OpAMD64BTLconst, ssa.OpAMD64BTQconst:
+               op := v.Op
+               if op == ssa.OpAMD64BTQconst && v.AuxInt < 32 {
+                       // Emit 32-bit version because it's shorter
+                       op = ssa.OpAMD64BTLconst
+               }
+               p := s.Prog(op.Asm())
+               p.From.Type = obj.TYPE_CONST
+               p.From.Offset = v.AuxInt
+               p.To.Type = obj.TYPE_REG
+               p.To.Reg = v.Args[0].Reg()
+       case ssa.OpAMD64TESTQconst, ssa.OpAMD64TESTLconst, ssa.OpAMD64TESTWconst, ssa.OpAMD64TESTBconst:
                p := s.Prog(v.Op.Asm())
                p.From.Type = obj.TYPE_CONST
                p.From.Offset = v.AuxInt
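
With the rewrite rule gone, the narrowing to the 32-bit form happens in one place, at instruction emission: SSA rules elsewhere only ever need to match BTQconst, and the shorter encoding is still produced, since BTL with an immediate does not need the REX.W prefix that the 64-bit BTQ form requires.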
index ffac45bf66cf0328cdb7dce84aef863a1a6e97cb..cabc8f4d2060fe2993a50e5fb1569a75011cbd70 100644 (file)
 (XORLconst [1] (SETBE x)) -> (SETA  x)
 (XORLconst [1] (SETA  x)) -> (SETBE x)
 
-// Convert BTQconst to BTLconst if possible. It has a shorter encoding.
-(BTQconst [c] x) && c < 32 -> (BTLconst [c] x)
-
 // Special case for floating point - LF/LEF not generated
 (NE (TESTB (SETGF  cmp) (SETGF  cmp)) yes no) -> (UGT  cmp yes no)
 (NE (TESTB (SETGEF cmp) (SETGEF cmp)) yes no) -> (UGE  cmp yes no)
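
The rules in gen/AMD64.rules are compiled by the rule generator in cmd/compile/internal/ssa/gen into rewriteAMD64.go, so removing the rule above also drops its generated dispatch entry and rewrite function in the hunk below.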
index eb2489ac7717ba9f4b031d077b25023fc5cdd01e..9d1d36d14c96bc592dda5d61352b8ed6ce24e899 100644 (file)
@@ -53,8 +53,6 @@ func rewriteValueAMD64(v *Value) bool {
                return rewriteValueAMD64_OpAMD64ANDQmem_0(v)
        case OpAMD64BSFQ:
                return rewriteValueAMD64_OpAMD64BSFQ_0(v)
-       case OpAMD64BTQconst:
-               return rewriteValueAMD64_OpAMD64BTQconst_0(v)
        case OpAMD64CMOVLCC:
                return rewriteValueAMD64_OpAMD64CMOVLCC_0(v)
        case OpAMD64CMOVLCS:
@@ -3336,23 +3334,6 @@ func rewriteValueAMD64_OpAMD64BSFQ_0(v *Value) bool {
        }
        return false
 }
-func rewriteValueAMD64_OpAMD64BTQconst_0(v *Value) bool {
-       // match: (BTQconst [c] x)
-       // cond: c < 32
-       // result: (BTLconst [c] x)
-       for {
-               c := v.AuxInt
-               x := v.Args[0]
-               if !(c < 32) {
-                       break
-               }
-               v.reset(OpAMD64BTLconst)
-               v.AuxInt = c
-               v.AddArg(x)
-               return true
-       }
-       return false
-}
 func rewriteValueAMD64_OpAMD64CMOVLCC_0(v *Value) bool {
        // match: (CMOVLCC x y (InvertFlags cond))
        // cond: