// as not-pointers or vice-versa because of copy
// elision.
if to.IsPtr() != from.IsPtr() {
- return s.newValue1(ssa.OpConvert, to, x)
+ return s.newValue2(ssa.OpConvert, to, x, s.mem())
}
v := s.newValue1(ssa.OpCopy, to, x) // ensure that v has the right type
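For context, a minimal user-level sketch of the kind of conversion that reaches the pointer/not-pointer branch above (an assumed illustration, not part of this change): converting between unsafe.Pointer and uintptr changes whether the value is pointer-shaped, so it is modeled as ssa.OpConvert, which after this change also carries the current memory state, rather than as a plain ssa.OpCopy.

package main

import (
	"fmt"
	"unsafe"
)

// addrOf converts a pointer to a uintptr. The operand is pointer-shaped and
// the result is not, so the compiler represents the conversion with
// ssa.OpConvert (now taking a memory argument) instead of ssa.OpCopy,
// keeping the GC's stack maps accurate.
func addrOf(p *int) uintptr {
	return uintptr(unsafe.Pointer(p))
}

func main() {
	x := 42
	fmt.Printf("%#x\n", addrOf(&x))
}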
p.To.Sym = Linksym(Pkglookup("duffcopy", Runtimepkg))
p.To.Offset = v.AuxInt
- case ssa.OpCopy: // TODO: lower to MOVQ earlier?
+ case ssa.OpCopy, ssa.OpAMD64MOVQconvert: // TODO: lower Copy to MOVQ earlier?
if v.Type.IsMemory() {
return
}
func TestAddressed(t *testing.T) { runTest(t, "addressed_ssa.go") }
func TestCopy(t *testing.T) { runTest(t, "copy_ssa.go") }
+
+func TestUnsafe(t *testing.T) { runTest(t, "unsafe_ssa.go") }
--- /dev/null
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+ "fmt"
+ "runtime"
+ "unsafe"
+)
+
+// global pointer slot
+var a *[8]uint
+
+// b is always true, but written so the compiler cannot constant-fold it.
+var b = true
+
+// Test to make sure that a pointer value which is alive
+// across a call is retained, even when there are matching
+// conversions to/from uintptr around the call.
+// We arrange things very carefully to have to/from
+// conversions on either side of the call which cannot be
+// combined with any other conversions.
+func f_ssa() *[8]uint {
+ // Make x a uintptr pointing to where a points.
+ var x uintptr
+ if b {
+ x = uintptr(unsafe.Pointer(a))
+ } else {
+ x = 0
+ }
+ // Clobber the global pointer. The only live ref
+ // to the allocated object is now x.
+ a = nil
+
+	// Convert to a pointer so the object
+	// is kept live across the GC call.
+ p := unsafe.Pointer(x)
+
+ // Call gc.
+ runtime.GC()
+
+ // Convert back to uintptr.
+ y := uintptr(p)
+
+ // Mess with y so that the subsequent cast
+ // to unsafe.Pointer can't be combined with the
+ // uintptr cast above.
+ var z uintptr
+ if b {
+ z = y
+ } else {
+ z = 0
+ }
+ return (*[8]uint)(unsafe.Pointer(z))
+}
+
+// g_ssa is the same as f_ssa, but with a bit of pointer
+// arithmetic for added insanity.
+func g_ssa() *[7]uint {
+ // Make x a uintptr pointing to where a points.
+ var x uintptr
+ if b {
+ x = uintptr(unsafe.Pointer(a))
+ } else {
+ x = 0
+ }
+ // Clobber the global pointer. The only live ref
+ // to the allocated object is now x.
+ a = nil
+
+ // Offset x by one int.
+ x += unsafe.Sizeof(int(0))
+
+	// Convert to a pointer so the object
+	// is kept live across the GC call.
+ p := unsafe.Pointer(x)
+
+ // Call gc.
+ runtime.GC()
+
+ // Convert back to uintptr.
+ y := uintptr(p)
+
+ // Mess with y so that the subsequent cast
+ // to unsafe.Pointer can't be combined with the
+ // uintptr cast above.
+ var z uintptr
+ if b {
+ z = y
+ } else {
+ z = 0
+ }
+ return (*[7]uint)(unsafe.Pointer(z))
+}
+
+func testf() {
+ a = new([8]uint)
+ for i := 0; i < 8; i++ {
+ a[i] = 0xabcd
+ }
+ c := f_ssa()
+ for i := 0; i < 8; i++ {
+ if c[i] != 0xabcd {
+ fmt.Printf("%d:%x\n", i, c[i])
+ panic("bad c")
+ }
+ }
+}
+
+func testg() {
+ a = new([8]uint)
+ for i := 0; i < 8; i++ {
+ a[i] = 0xabcd
+ }
+ c := g_ssa()
+ for i := 0; i < 7; i++ {
+ if c[i] != 0xabcd {
+ fmt.Printf("%d:%x\n", i, c[i])
+ panic("bad c")
+ }
+ }
+}
+
+func main() {
+ testf()
+ testg()
+}
(Store [1] ptr val mem) -> (MOVBstore ptr val mem)
// We want this to stick out so the to/from ptr conversion is obvious
-(Convert <t> x) -> (LEAQ <t> x)
+(Convert <t> x mem) -> (MOVQconvert <t> x mem)
// checks
(IsNonNil p) -> (SETNE (TESTQ p p))
{name: "LoweredGetClosurePtr", reg: regInfo{outputs: []regMask{buildReg("DX")}}},
	// arg0=ptr, arg1=mem, returns void. Faults if ptr is nil.
{name: "LoweredNilCheck", reg: regInfo{inputs: []regMask{gpsp}, clobbers: flags}},
+
+ // MOVQconvert converts between pointers and integers.
+ // We have a special op for this so as to not confuse GC
+ // (particularly stack maps). It takes a memory arg so it
+ // gets correctly ordered with respect to GC safepoints.
+ // arg0=ptr/int arg1=mem, output=int/ptr
+ {name: "MOVQconvert", reg: gp11nf, asm: "MOVQ"},
}
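A distilled sketch of the scenario this op exists for, mirroring the unsafe_ssa.go test above (an illustrative assumption, not code from this change): between the two conversions the only reference to the pointed-to object may be an integer, and the memory argument keeps each MOVQconvert ordered with respect to the safepoint in between.

package main

import (
	"fmt"
	"runtime"
	"unsafe"
)

// acrossGC round-trips a pointer through uintptr around a GC cycle.
// Each conversion becomes a MOVQconvert; its memory argument keeps it
// ordered relative to the call to runtime.GC(), which is what the
// unsafe_ssa.go test above relies on when it demands that the
// allocation survive the collection.
func acrossGC(p *[4]int) *[4]int {
	u := uintptr(unsafe.Pointer(p)) // pointer -> integer
	runtime.GC()                    // safepoint between the conversions
	return (*[4]int)(unsafe.Pointer(u)) // integer -> pointer
}

func main() {
	q := acrossGC(new([4]int))
	fmt.Println(q[0])
}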
var AMD64blocks = []blockData{
(If (ConstBool [c]) yes no) && c == 0 -> (First nil no yes)
// Get rid of Convert ops for pointer arithmetic on unsafe.Pointer.
-(Convert (Add64 (Convert ptr) off)) -> (Add64 ptr off)
+(Convert (Add64 (Convert ptr mem) off) mem) -> (Add64 ptr off)
+(Convert (Convert ptr mem) mem) -> ptr
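For reference, a user-level sketch of the patterns the two rules above clean up (an assumed illustration, not from this change): offsetting a pointer through uintptr builds (Convert (Add64 (Convert ptr mem) off) mem), which the first rule turns back into an ordinary pointer add, while a conversion round trip over the same memory state collapses to the original pointer via the second rule.

package main

import (
	"fmt"
	"unsafe"
)

type pair struct {
	a, b uint64
}

// fieldB computes &p.b by going through uintptr. In SSA this builds
// (Convert (Add64 (Convert p mem) off) mem), which the first rule above
// rewrites to a plain pointer add, removing both Convert ops.
func fieldB(p *pair) *uint64 {
	return (*uint64)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) + unsafe.Offsetof(p.b)))
}

// roundTrip converts p to uintptr and straight back. Both Converts see the
// same memory state, so the second rule above reduces the pair to p itself.
func roundTrip(p *pair) *pair {
	return (*pair)(unsafe.Pointer(uintptr(unsafe.Pointer(p))))
}

func main() {
	p := &pair{a: 1, b: 2}
	fmt.Println(*fieldB(p), roundTrip(p) == p)
}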
// Decompose compound argument values
(Arg {n} [off]) && v.Type.IsString() ->
{name: "Sqrt"}, // sqrt(arg0), float64 only
// Data movement
- {name: "Phi"}, // select an argument based on which predecessor block we came from
- {name: "Copy"}, // output = arg0
- {name: "Convert"}, // output = arg0 -- a copy that converts to/from a pointer
+ {name: "Phi"}, // select an argument based on which predecessor block we came from
+ {name: "Copy"}, // output = arg0
+ // Convert converts between pointers and integers.
+ // We have a special op for this so as to not confuse GC
+ // (particularly stack maps). It takes a memory arg so it
+ // gets correctly ordered with respect to GC safepoints.
+ // arg0=ptr/int arg1=mem, output=int/ptr
+ {name: "Convert"},
// constants. Constant values are stored in the aux field.
// booleans have a bool aux field, strings have a string aux
OpAMD64LoweredGetG
OpAMD64LoweredGetClosurePtr
OpAMD64LoweredNilCheck
+ OpAMD64MOVQconvert
OpAdd8
OpAdd16
clobbers: 8589934592, // .FLAGS
},
},
+ {
+ name: "MOVQconvert",
+ asm: x86.AMOVQ,
+ reg: regInfo{
+ inputs: []inputInfo{
+ {0, 65535}, // .AX .CX .DX .BX .SP .BP .SI .DI .R8 .R9 .R10 .R11 .R12 .R13 .R14 .R15
+ },
+ outputs: []regMask{
+ 65519, // .AX .CX .DX .BX .BP .SI .DI .R8 .R9 .R10 .R11 .R12 .R13 .R14 .R15
+ },
+ },
+ },
{
name: "Add8",
func rewriteValueAMD64_OpConvert(v *Value, config *Config) bool {
b := v.Block
_ = b
- // match: (Convert <t> x)
+ // match: (Convert <t> x mem)
// cond:
- // result: (LEAQ <t> x)
+ // result: (MOVQconvert <t> x mem)
{
t := v.Type
x := v.Args[0]
- v.Op = OpAMD64LEAQ
+ mem := v.Args[1]
+ v.Op = OpAMD64MOVQconvert
v.AuxInt = 0
v.Aux = nil
v.resetArgs()
v.Type = t
v.AddArg(x)
+ v.AddArg(mem)
return true
}
- goto end1cac40a6074914d6ae3d4aa039a625ed
-end1cac40a6074914d6ae3d4aa039a625ed:
+ goto end0aa5cd28888761ffab21bce45db361c8
+end0aa5cd28888761ffab21bce45db361c8:
;
return false
}
func rewriteValuegeneric_OpConvert(v *Value, config *Config) bool {
b := v.Block
_ = b
- // match: (Convert (Add64 (Convert ptr) off))
+ // match: (Convert (Add64 (Convert ptr mem) off) mem)
// cond:
// result: (Add64 ptr off)
{
if v.Args[0].Op != OpAdd64 {
- goto end913a7ecf456c00ffbee36c2dbbf0e1af
+ goto endbbc9f1666b4d39a130e1b86f109e7c1b
}
if v.Args[0].Args[0].Op != OpConvert {
- goto end913a7ecf456c00ffbee36c2dbbf0e1af
+ goto endbbc9f1666b4d39a130e1b86f109e7c1b
}
ptr := v.Args[0].Args[0].Args[0]
+ mem := v.Args[0].Args[0].Args[1]
off := v.Args[0].Args[1]
+ if v.Args[1] != mem {
+ goto endbbc9f1666b4d39a130e1b86f109e7c1b
+ }
		v.Op = OpAdd64
		v.AuxInt = 0
		v.Aux = nil
		v.resetArgs()
		v.AddArg(ptr)
		v.AddArg(off)
return true
}
- goto end913a7ecf456c00ffbee36c2dbbf0e1af
-end913a7ecf456c00ffbee36c2dbbf0e1af:
+ goto endbbc9f1666b4d39a130e1b86f109e7c1b
+endbbc9f1666b4d39a130e1b86f109e7c1b:
+ ;
+ // match: (Convert (Convert ptr mem) mem)
+ // cond:
+ // result: ptr
+ {
+ if v.Args[0].Op != OpConvert {
+ goto end98c5e0ca257eb216989171786f91b42d
+ }
+ ptr := v.Args[0].Args[0]
+ mem := v.Args[0].Args[1]
+ if v.Args[1] != mem {
+ goto end98c5e0ca257eb216989171786f91b42d
+ }
+ v.Op = OpCopy
+ v.AuxInt = 0
+ v.Aux = nil
+ v.resetArgs()
+ v.Type = ptr.Type
+ v.AddArg(ptr)
+ return true
+ }
+ goto end98c5e0ca257eb216989171786f91b42d
+end98c5e0ca257eb216989171786f91b42d:
;
return false
}