Cypherpunks repositories - gostls13.git/commitdiff
cmd/compile: implement signed loads from read-only memory
author: Keith Randall <khr@golang.org>
Thu, 13 Feb 2025 16:04:03 +0000 (08:04 -0800)
committer: Gopher Robot <gobot@golang.org>
Thu, 13 Feb 2025 20:27:55 +0000 (12:27 -0800)
In addition to unsigned loads which already exist.

This helps code that does switches on strings to constant-fold
the switch away when the string being switched on is constant.

Fixes #71699

Change-Id: If3051af0f7255d2a573da6f96b153a987a7f159d
Reviewed-on: https://go-review.googlesource.com/c/go/+/649295
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Reviewed-by: Cuong Manh Le <cuong.manhle.vn@gmail.com>
Reviewed-by: Dmitri Shuralyov <dmitshur@google.com>
Reviewed-by: Keith Randall <khr@google.com>
Auto-Submit: Keith Randall <khr@google.com>

14 files changed:
src/cmd/compile/internal/ssa/_gen/386.rules
src/cmd/compile/internal/ssa/_gen/AMD64.rules
src/cmd/compile/internal/ssa/_gen/AMD64Ops.go
src/cmd/compile/internal/ssa/_gen/ARM.rules
src/cmd/compile/internal/ssa/_gen/ARM64.rules
src/cmd/compile/internal/ssa/_gen/MIPS64.rules
src/cmd/compile/internal/ssa/_gen/Wasm.rules
src/cmd/compile/internal/ssa/rewrite386.go
src/cmd/compile/internal/ssa/rewriteAMD64.go
src/cmd/compile/internal/ssa/rewriteARM.go
src/cmd/compile/internal/ssa/rewriteARM64.go
src/cmd/compile/internal/ssa/rewriteMIPS64.go
src/cmd/compile/internal/ssa/rewriteWasm.go
test/codegen/switch.go

index 67cfa3460aa3e179912eccc910228ac8cbd452f7..216f5c2e2e79eb1cfb3d8e8da03d333c85acaf76 100644 (file)
 (MOVBload [off] {sym} (SB) _) && symIsRO(sym) => (MOVLconst [int32(read8(sym, int64(off)))])
 (MOVWload [off] {sym} (SB) _) && symIsRO(sym) => (MOVLconst [int32(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
 (MOVLload [off] {sym} (SB) _) && symIsRO(sym) => (MOVLconst [int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+(MOVBLSXload [off] {sym} (SB) _) && symIsRO(sym) => (MOVLconst [int32(int8(read8(sym, int64(off))))])
+(MOVWLSXload [off] {sym} (SB) _) && symIsRO(sym) => (MOVLconst [int32(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
index ba7f181f5e49097ba428efe0193a4748c2da5644..0e429b5be74dcb05315b9c10ee24a9fe9c904fbd 100644 (file)
 
 (MOVBload [off] {sym} (SB) _) && symIsRO(sym) => (MOVLconst [int32(read8(sym, int64(off)))])
 (MOVWload [off] {sym} (SB) _) && symIsRO(sym) => (MOVLconst [int32(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
-(MOVLload [off] {sym} (SB) _) && symIsRO(sym) => (MOVQconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+(MOVLload [off] {sym} (SB) _) && symIsRO(sym) => (MOVLconst [int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
 (MOVQload [off] {sym} (SB) _) && symIsRO(sym) => (MOVQconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+(MOVBQSXload [off] {sym} (SB) _) && symIsRO(sym) => (MOVQconst [int64(int8(read8(sym, int64(off))))])
+(MOVWQSXload [off] {sym} (SB) _) && symIsRO(sym) => (MOVQconst [int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
+(MOVLQSXload [off] {sym} (SB) _) && symIsRO(sym) => (MOVQconst [int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
+
+
 (MOVOstore [dstOff] {dstSym} ptr (MOVOload [srcOff] {srcSym} (SB) _) mem) && symIsRO(srcSym) =>
   (MOVQstore [dstOff+8] {dstSym} ptr (MOVQconst [int64(read64(srcSym, int64(srcOff)+8, config.ctxt.Arch.ByteOrder))])
     (MOVQstore [dstOff] {dstSym} ptr (MOVQconst [int64(read64(srcSym, int64(srcOff), config.ctxt.Arch.ByteOrder))]) mem))
index 7be70c7737240336c379bdf7acb80b2c2d3b1196..53df7af3059a522a8f19a175f01fc6597a508e89 100644 (file)
@@ -758,7 +758,7 @@ func init() {
                {name: "MOVLQSX", argLength: 1, reg: gp11, asm: "MOVLQSX"}, // sign extend arg0 from int32 to int64
                {name: "MOVLQZX", argLength: 1, reg: gp11, asm: "MOVL"},    // zero extend arg0 from int32 to int64
 
-               {name: "MOVLconst", reg: gp01, asm: "MOVL", typ: "UInt32", aux: "Int32", rematerializeable: true}, // 32 low bits of auxint
+               {name: "MOVLconst", reg: gp01, asm: "MOVL", typ: "UInt32", aux: "Int32", rematerializeable: true}, // 32 low bits of auxint (upper 32 are zeroed)
                {name: "MOVQconst", reg: gp01, asm: "MOVQ", typ: "UInt64", aux: "Int64", rematerializeable: true}, // auxint
 
                {name: "CVTTSD2SL", argLength: 1, reg: fpgp, asm: "CVTTSD2SL"}, // convert float64 to int32
index 9cdb5d8ad541c986f211b254ef56c182a8c3dda5..a3bb2c312f9c16f5907827c55c211ab8f92206dd 100644 (file)
 (MOVBUload [off] {sym} (SB) _) && symIsRO(sym) => (MOVWconst [int32(read8(sym, int64(off)))])
 (MOVHUload [off] {sym} (SB) _) && symIsRO(sym) => (MOVWconst [int32(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
 (MOVWload [off] {sym} (SB) _) && symIsRO(sym) => (MOVWconst [int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+(MOVBload [off] {sym} (SB) _) && symIsRO(sym) => (MOVWconst [int32(int8(read8(sym, int64(off))))])
+(MOVHload [off] {sym} (SB) _) && symIsRO(sym) => (MOVWconst [int32(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
index 6652d2ec014c262f92e81e813a6aacff1c238ed6..3696e17d9ce957d9cbf48739795efaa9c46bb176 100644 (file)
 (MOVHUload [off] {sym} (SB) _) && symIsRO(sym) => (MOVDconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
 (MOVWUload [off] {sym} (SB) _) && symIsRO(sym) => (MOVDconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
 (MOVDload  [off] {sym} (SB) _) && symIsRO(sym) => (MOVDconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+(MOVBload  [off] {sym} (SB) _) && symIsRO(sym) => (MOVDconst [int64(int8(read8(sym, int64(off))))])
+(MOVHload  [off] {sym} (SB) _) && symIsRO(sym) => (MOVDconst [int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
+(MOVWload  [off] {sym} (SB) _) && symIsRO(sym) => (MOVDconst [int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
 
 // Prefetch instructions (aux is option: 0 - PLDL1KEEP; 1 - PLDL1STRM)
 (PrefetchCache addr mem)         => (PRFM [0] addr mem)
index 8aed350039ab7eeef26e64988df8e116c9ecc554..cc3985ecdd0a6fc66a378a094a591f79fd606549 100644 (file)
 (SGTU x x) => (MOVVconst [0])
 
 // fold readonly sym load
-(MOVBload [off] {sym} (SB) _) && symIsRO(sym) => (MOVVconst [int64(read8(sym, int64(off)))])
-(MOVHload [off] {sym} (SB) _) && symIsRO(sym) => (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
-(MOVWload [off] {sym} (SB) _) && symIsRO(sym) => (MOVVconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+(MOVBUload [off] {sym} (SB) _) && symIsRO(sym) => (MOVVconst [int64(read8(sym, int64(off)))])
+(MOVHUload [off] {sym} (SB) _) && symIsRO(sym) => (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+(MOVWUload [off] {sym} (SB) _) && symIsRO(sym) => (MOVVconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
 (MOVVload [off] {sym} (SB) _) && symIsRO(sym) => (MOVVconst [int64(read64(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+(MOVBload [off] {sym} (SB) _) && symIsRO(sym) => (MOVVconst [int64(int8(read8(sym, int64(off))))])
+(MOVHload [off] {sym} (SB) _) && symIsRO(sym) => (MOVVconst [int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
+(MOVWload [off] {sym} (SB) _) && symIsRO(sym) => (MOVVconst [int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
index 91a9fc5e4a9772b3e5e337c5c1078170700a1dd7..08cadabe0ea4c24b73ef48bd35bf484f9a389ade 100644 (file)
 (I64Load32U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read32(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))])
 (I64Load16U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read16(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))])
 (I64Load8U [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(read8(sym, off+int64(off2)))])
+(I64Load32S [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(int32(read32(sym, off+int64(off2), config.ctxt.Arch.ByteOrder)))])
+(I64Load16S [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(int16(read16(sym, off+int64(off2), config.ctxt.Arch.ByteOrder)))])
+(I64Load8S [off] (LoweredAddr {sym} [off2] (SB)) _) && symIsRO(sym) && isU32Bit(off+int64(off2)) => (I64Const [int64(int8(read8(sym, off+int64(off2))))])
index 9f1645f8c33dccada460dead6dd09c8b2cb290f0..dbc1335fcdc338ecf373eecb1fdc64129c619531 100644 (file)
@@ -3491,6 +3491,19 @@ func rewriteValue386_Op386MOVBLSXload(v *Value) bool {
                v.AddArg2(base, mem)
                return true
        }
+       // match: (MOVBLSXload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVLconst [int32(int8(read8(sym, int64(off))))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(Op386MOVLconst)
+               v.AuxInt = int32ToAuxInt(int32(int8(read8(sym, int64(off)))))
+               return true
+       }
        return false
 }
 func rewriteValue386_Op386MOVBLZX(v *Value) bool {
@@ -4672,6 +4685,19 @@ func rewriteValue386_Op386MOVWLSXload(v *Value) bool {
                v.AddArg2(base, mem)
                return true
        }
+       // match: (MOVWLSXload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVLconst [int32(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(Op386MOVLconst)
+               v.AuxInt = int32ToAuxInt(int32(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
+               return true
+       }
        return false
 }
 func rewriteValue386_Op386MOVWLZX(v *Value) bool {
index 28041ea76d1953a22af0cea69f9bf71ca42f84fe..9ea1114d45e9b0cc8e4bf8df87680cb2c1bcb2de 100644 (file)
@@ -9668,6 +9668,19 @@ func rewriteValueAMD64_OpAMD64MOVBQSXload(v *Value) bool {
                v.AddArg2(base, mem)
                return true
        }
+       // match: (MOVBQSXload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVQconst [int64(int8(read8(sym, int64(off))))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(OpAMD64MOVQconst)
+               v.AuxInt = int64ToAuxInt(int64(int8(read8(sym, int64(off)))))
+               return true
+       }
        return false
 }
 func rewriteValueAMD64_OpAMD64MOVBQZX(v *Value) bool {
@@ -10412,6 +10425,8 @@ func rewriteValueAMD64_OpAMD64MOVLQSX(v *Value) bool {
 func rewriteValueAMD64_OpAMD64MOVLQSXload(v *Value) bool {
        v_1 := v.Args[1]
        v_0 := v.Args[0]
+       b := v.Block
+       config := b.Func.Config
        // match: (MOVLQSXload [off] {sym} ptr (MOVLstore [off2] {sym2} ptr2 x _))
        // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
        // result: (MOVLQSX x)
@@ -10455,6 +10470,19 @@ func rewriteValueAMD64_OpAMD64MOVLQSXload(v *Value) bool {
                v.AddArg2(base, mem)
                return true
        }
+       // match: (MOVLQSXload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVQconst [int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(OpAMD64MOVQconst)
+               v.AuxInt = int64ToAuxInt(int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))))
+               return true
+       }
        return false
 }
 func rewriteValueAMD64_OpAMD64MOVLQZX(v *Value) bool {
@@ -10742,15 +10770,15 @@ func rewriteValueAMD64_OpAMD64MOVLload(v *Value) bool {
        }
        // match: (MOVLload [off] {sym} (SB) _)
        // cond: symIsRO(sym)
-       // result: (MOVQconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+       // result: (MOVLconst [int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
        for {
                off := auxIntToInt32(v.AuxInt)
                sym := auxToSym(v.Aux)
                if v_0.Op != OpSB || !(symIsRO(sym)) {
                        break
                }
-               v.reset(OpAMD64MOVQconst)
-               v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
+               v.reset(OpAMD64MOVLconst)
+               v.AuxInt = int32ToAuxInt(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
                return true
        }
        return false
@@ -12792,6 +12820,8 @@ func rewriteValueAMD64_OpAMD64MOVWQSX(v *Value) bool {
 func rewriteValueAMD64_OpAMD64MOVWQSXload(v *Value) bool {
        v_1 := v.Args[1]
        v_0 := v.Args[0]
+       b := v.Block
+       config := b.Func.Config
        // match: (MOVWQSXload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
        // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
        // result: (MOVWQSX x)
@@ -12835,6 +12865,19 @@ func rewriteValueAMD64_OpAMD64MOVWQSXload(v *Value) bool {
                v.AddArg2(base, mem)
                return true
        }
+       // match: (MOVWQSXload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVQconst [int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(OpAMD64MOVQconst)
+               v.AuxInt = int64ToAuxInt(int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
+               return true
+       }
        return false
 }
 func rewriteValueAMD64_OpAMD64MOVWQZX(v *Value) bool {
index 09be5ccf685b617a5df63b217b198385091fea50..8dfa9ab6d6f20a09c4b27c05301359af3feea168 100644 (file)
@@ -4863,6 +4863,19 @@ func rewriteValueARM_OpARMMOVBload(v *Value) bool {
                v.AddArg3(ptr, idx, mem)
                return true
        }
+       // match: (MOVBload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVWconst [int32(int8(read8(sym, int64(off))))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(OpARMMOVWconst)
+               v.AuxInt = int32ToAuxInt(int32(int8(read8(sym, int64(off)))))
+               return true
+       }
        return false
 }
 func rewriteValueARM_OpARMMOVBloadidx(v *Value) bool {
@@ -5700,6 +5713,8 @@ func rewriteValueARM_OpARMMOVHUreg(v *Value) bool {
 func rewriteValueARM_OpARMMOVHload(v *Value) bool {
        v_1 := v.Args[1]
        v_0 := v.Args[0]
+       b := v.Block
+       config := b.Func.Config
        // match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
        // result: (MOVHload [off1+off2] {sym} ptr mem)
        for {
@@ -5798,6 +5813,19 @@ func rewriteValueARM_OpARMMOVHload(v *Value) bool {
                v.AddArg3(ptr, idx, mem)
                return true
        }
+       // match: (MOVHload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVWconst [int32(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(OpARMMOVWconst)
+               v.AuxInt = int32ToAuxInt(int32(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
+               return true
+       }
        return false
 }
 func rewriteValueARM_OpARMMOVHloadidx(v *Value) bool {
index 6fabb77c0d804a3be174419beed2d07a1da743da..def0003764d3c5d24639ac2a27d0aa7ebb7b30b4 100644 (file)
@@ -8718,6 +8718,19 @@ func rewriteValueARM64_OpARM64MOVBload(v *Value) bool {
                v.AuxInt = int64ToAuxInt(0)
                return true
        }
+       // match: (MOVBload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVDconst [int64(int8(read8(sym, int64(off))))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(OpARM64MOVDconst)
+               v.AuxInt = int64ToAuxInt(int64(int8(read8(sym, int64(off)))))
+               return true
+       }
        return false
 }
 func rewriteValueARM64_OpARM64MOVBloadidx(v *Value) bool {
@@ -10563,6 +10576,19 @@ func rewriteValueARM64_OpARM64MOVHload(v *Value) bool {
                v.AuxInt = int64ToAuxInt(0)
                return true
        }
+       // match: (MOVHload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVDconst [int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(OpARM64MOVDconst)
+               v.AuxInt = int64ToAuxInt(int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
+               return true
+       }
        return false
 }
 func rewriteValueARM64_OpARM64MOVHloadidx(v *Value) bool {
@@ -11978,6 +12004,19 @@ func rewriteValueARM64_OpARM64MOVWload(v *Value) bool {
                v.AuxInt = int64ToAuxInt(0)
                return true
        }
+       // match: (MOVWload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVDconst [int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(OpARM64MOVDconst)
+               v.AuxInt = int64ToAuxInt(int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))))
+               return true
+       }
        return false
 }
 func rewriteValueARM64_OpARM64MOVWloadidx(v *Value) bool {
index bad8016cb4b04af4c08df21cf80b153617f3182a..b82f027a5a801ad5b034cea6aacf020d07d5f5cc 100644 (file)
@@ -2802,6 +2802,19 @@ func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value) bool {
                v.AddArg2(ptr, mem)
                return true
        }
+       // match: (MOVBUload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVVconst [int64(read8(sym, int64(off)))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(OpMIPS64MOVVconst)
+               v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
+               return true
+       }
        return false
 }
 func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value) bool {
@@ -2891,7 +2904,7 @@ func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
        }
        // match: (MOVBload [off] {sym} (SB) _)
        // cond: symIsRO(sym)
-       // result: (MOVVconst [int64(read8(sym, int64(off)))])
+       // result: (MOVVconst [int64(int8(read8(sym, int64(off))))])
        for {
                off := auxIntToInt32(v.AuxInt)
                sym := auxToSym(v.Aux)
@@ -2899,7 +2912,7 @@ func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
                        break
                }
                v.reset(OpMIPS64MOVVconst)
-               v.AuxInt = int64ToAuxInt(int64(read8(sym, int64(off))))
+               v.AuxInt = int64ToAuxInt(int64(int8(read8(sym, int64(off)))))
                return true
        }
        return false
@@ -3484,6 +3497,19 @@ func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value) bool {
                v.AddArg2(ptr, mem)
                return true
        }
+       // match: (MOVHUload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(OpMIPS64MOVVconst)
+               v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
+               return true
+       }
        return false
 }
 func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value) bool {
@@ -3595,7 +3621,7 @@ func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
        }
        // match: (MOVHload [off] {sym} (SB) _)
        // cond: symIsRO(sym)
-       // result: (MOVVconst [int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+       // result: (MOVVconst [int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
        for {
                off := auxIntToInt32(v.AuxInt)
                sym := auxToSym(v.Aux)
@@ -3603,7 +3629,7 @@ func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
                        break
                }
                v.reset(OpMIPS64MOVVconst)
-               v.AuxInt = int64ToAuxInt(int64(read16(sym, int64(off), config.ctxt.Arch.ByteOrder)))
+               v.AuxInt = int64ToAuxInt(int64(int16(read16(sym, int64(off), config.ctxt.Arch.ByteOrder))))
                return true
        }
        return false
@@ -4202,6 +4228,19 @@ func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value) bool {
                v.AddArg2(ptr, mem)
                return true
        }
+       // match: (MOVWUload [off] {sym} (SB) _)
+       // cond: symIsRO(sym)
+       // result: (MOVVconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+       for {
+               off := auxIntToInt32(v.AuxInt)
+               sym := auxToSym(v.Aux)
+               if v_0.Op != OpSB || !(symIsRO(sym)) {
+                       break
+               }
+               v.reset(OpMIPS64MOVVconst)
+               v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
+               return true
+       }
        return false
 }
 func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value) bool {
@@ -4335,7 +4374,7 @@ func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
        }
        // match: (MOVWload [off] {sym} (SB) _)
        // cond: symIsRO(sym)
-       // result: (MOVVconst [int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))])
+       // result: (MOVVconst [int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))])
        for {
                off := auxIntToInt32(v.AuxInt)
                sym := auxToSym(v.Aux)
@@ -4343,7 +4382,7 @@ func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
                        break
                }
                v.reset(OpMIPS64MOVVconst)
-               v.AuxInt = int64ToAuxInt(int64(read32(sym, int64(off), config.ctxt.Arch.ByteOrder)))
+               v.AuxInt = int64ToAuxInt(int64(int32(read32(sym, int64(off), config.ctxt.Arch.ByteOrder))))
                return true
        }
        return false
index 6f83aea13afc5a656c5f98136cae6a0f7ce2b2b1..e0d753185f6649f10a2b50a431978ec7f3bf0c50 100644 (file)
@@ -3899,6 +3899,8 @@ func rewriteValueWasm_OpWasmI64Load(v *Value) bool {
 func rewriteValueWasm_OpWasmI64Load16S(v *Value) bool {
        v_1 := v.Args[1]
        v_0 := v.Args[0]
+       b := v.Block
+       config := b.Func.Config
        // match: (I64Load16S [off] (I64AddConst [off2] ptr) mem)
        // cond: isU32Bit(off+off2)
        // result: (I64Load16S [off+off2] ptr mem)
@@ -3918,6 +3920,24 @@ func rewriteValueWasm_OpWasmI64Load16S(v *Value) bool {
                v.AddArg2(ptr, mem)
                return true
        }
+       // match: (I64Load16S [off] (LoweredAddr {sym} [off2] (SB)) _)
+       // cond: symIsRO(sym) && isU32Bit(off+int64(off2))
+       // result: (I64Const [int64(int16(read16(sym, off+int64(off2), config.ctxt.Arch.ByteOrder)))])
+       for {
+               off := auxIntToInt64(v.AuxInt)
+               if v_0.Op != OpWasmLoweredAddr {
+                       break
+               }
+               off2 := auxIntToInt32(v_0.AuxInt)
+               sym := auxToSym(v_0.Aux)
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSB || !(symIsRO(sym) && isU32Bit(off+int64(off2))) {
+                       break
+               }
+               v.reset(OpWasmI64Const)
+               v.AuxInt = int64ToAuxInt(int64(int16(read16(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))))
+               return true
+       }
        return false
 }
 func rewriteValueWasm_OpWasmI64Load16U(v *Value) bool {
@@ -3967,6 +3987,8 @@ func rewriteValueWasm_OpWasmI64Load16U(v *Value) bool {
 func rewriteValueWasm_OpWasmI64Load32S(v *Value) bool {
        v_1 := v.Args[1]
        v_0 := v.Args[0]
+       b := v.Block
+       config := b.Func.Config
        // match: (I64Load32S [off] (I64AddConst [off2] ptr) mem)
        // cond: isU32Bit(off+off2)
        // result: (I64Load32S [off+off2] ptr mem)
@@ -3986,6 +4008,24 @@ func rewriteValueWasm_OpWasmI64Load32S(v *Value) bool {
                v.AddArg2(ptr, mem)
                return true
        }
+       // match: (I64Load32S [off] (LoweredAddr {sym} [off2] (SB)) _)
+       // cond: symIsRO(sym) && isU32Bit(off+int64(off2))
+       // result: (I64Const [int64(int32(read32(sym, off+int64(off2), config.ctxt.Arch.ByteOrder)))])
+       for {
+               off := auxIntToInt64(v.AuxInt)
+               if v_0.Op != OpWasmLoweredAddr {
+                       break
+               }
+               off2 := auxIntToInt32(v_0.AuxInt)
+               sym := auxToSym(v_0.Aux)
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSB || !(symIsRO(sym) && isU32Bit(off+int64(off2))) {
+                       break
+               }
+               v.reset(OpWasmI64Const)
+               v.AuxInt = int64ToAuxInt(int64(int32(read32(sym, off+int64(off2), config.ctxt.Arch.ByteOrder))))
+               return true
+       }
        return false
 }
 func rewriteValueWasm_OpWasmI64Load32U(v *Value) bool {
@@ -4054,6 +4094,24 @@ func rewriteValueWasm_OpWasmI64Load8S(v *Value) bool {
                v.AddArg2(ptr, mem)
                return true
        }
+       // match: (I64Load8S [off] (LoweredAddr {sym} [off2] (SB)) _)
+       // cond: symIsRO(sym) && isU32Bit(off+int64(off2))
+       // result: (I64Const [int64(int8(read8(sym, off+int64(off2))))])
+       for {
+               off := auxIntToInt64(v.AuxInt)
+               if v_0.Op != OpWasmLoweredAddr {
+                       break
+               }
+               off2 := auxIntToInt32(v_0.AuxInt)
+               sym := auxToSym(v_0.Aux)
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSB || !(symIsRO(sym) && isU32Bit(off+int64(off2))) {
+                       break
+               }
+               v.reset(OpWasmI64Const)
+               v.AuxInt = int64ToAuxInt(int64(int8(read8(sym, off+int64(off2)))))
+               return true
+       }
        return false
 }
 func rewriteValueWasm_OpWasmI64Load8U(v *Value) bool {
index 980ea7056192f3581c6b20eee17e1ed201f40d38..509343110a32db307298a5759703df827b9daa02 100644 (file)
@@ -183,3 +183,17 @@ func interfaceConv(x IJ) I {
        // arm64:`CALL\truntime.typeAssert`,`LDAR`,`MOVWU\t16\(R0\)`,`MOVD\t\(R.*\)\(R.*\)`
        return x
 }
+
+// Make sure we can constant fold after inlining. See issue 71699.
+func stringSwitchInlineable(s string) {
+       switch s {
+       case "foo", "bar", "baz", "goo":
+       default:
+               println("no")
+       }
+}
+func stringSwitch() {
+       // amd64:-"CMP",-"CALL"
+       // arm64:-"CMP",-"CALL"
+       stringSwitchInlineable("foo")
+}