}
for i := 0; i < b.N; i++ {
- fun := Fun(c, DummyFrontend{b}, "entry", Bloc("entry", values...))
+ fun := c.Fun("entry", Bloc("entry", values...))
Copyelim(fun.f)
}
}
// construct lots of values with args that have aux values and place
// them in an order that triggers the bug
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sp", OpSP, TypeBytePtr, 0, nil),
func TestZCSE(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sp", OpSP, TypeBytePtr, 0, nil),
func TestDeadLoop(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Goto("exit")),
func TestDeadValue(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("deadval", OpConst64, TypeInt64, 37, nil),
func TestNeverTaken(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("cond", OpConstBool, TypeBool, 0, nil),
Valu("mem", OpInitMem, TypeMem, 0, nil),
func TestNestedDeadBlocks(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("cond", OpConstBool, TypeBool, 0, nil),
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
- fun := Fun(c, DummyFrontend{b}, "entry", blocks...)
+ fun := c.Fun("entry", blocks...)
Deadcode(fun.f)
}
})
c := testConfig(t)
elemType := &TypeImpl{Size_: 1, Name: "testtype"}
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr", Elem_: elemType} // dummy for testing
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// make sure we don't get into an infinite loop with phi values.
c := testConfig(t)
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
c := testConfig(t)
t1 := &TypeImpl{Size_: 8, Ptr: true, Name: "t1"}
t2 := &TypeImpl{Size_: 4, Ptr: true, Name: "t2"}
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// can get to a point where the size is changed but type unchanged.
c := testConfig(t)
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func benchmarkDominators(b *testing.B, size int, bg blockGen) {
c := testConfig(b)
- fun := Fun(c, DummyFrontend{b}, "entry", bg(size)...)
+ fun := c.Fun("entry", bg(size)...)
CheckFunc(fun.f)
b.SetBytes(int64(size))
func TestDominatorsSingleBlock(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Exit("mem")))
func TestDominatorsSimple(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Goto("a")),
func TestDominatorsMultPredFwd(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("p", OpConstBool, TypeBool, 1, nil),
func TestDominatorsDeadCode(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("p", OpConstBool, TypeBool, 0, nil),
func TestDominatorsMultPredRev(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Goto("first")),
Bloc("first",
func TestDominatorsMultPred(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("p", OpConstBool, TypeBool, 1, nil),
func TestInfiniteLoop(t *testing.T) {
c := testConfig(t)
// note lack of an exit block
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("p", OpConstBool, TypeBool, 1, nil),
b := 1 & i >> 1
c := 1 & i >> 2
- fun := Fun(testConfig(t), DummyFrontend{t}, "1",
+ cfg := testConfig(t)
+ fun := cfg.Fun("1",
Bloc("1",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("p", OpConstBool, TypeBool, 1, nil),
func TestDominatorsPostTricky(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "b1",
+ fun := c.Fun("b1",
Bloc("b1",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("p", OpConstBool, TypeBool, 1, nil),
var Opt = opt
var Deadcode = deadcode
var Copyelim = copyelim
-var TestCtxt = obj.Linknew(&x86.Linkamd64)
-func testConfig(t testing.TB) *Config {
- return NewConfig("amd64", dummyTypes, TestCtxt, true)
+// testCtxts caches one obj.Link per supported test architecture so
+// configurations can share link contexts instead of rebuilding them.
+var testCtxts = map[string]*obj.Link{
+ "amd64": obj.Linknew(&x86.Linkamd64),
+ "s390x": obj.Linknew(&s390x.Links390x),
}
-func testConfigS390X(t testing.TB) *Config {
- return NewConfig("s390x", dummyTypes, obj.Linknew(&s390x.Links390x), true)
+// testConfig returns a new amd64 test configuration.
+func testConfig(tb testing.TB) *Conf { return testConfigArch(tb, "amd64") }
+// testConfigS390X returns a new s390x test configuration.
+func testConfigS390X(tb testing.TB) *Conf { return testConfigArch(tb, "s390x") }
+
+// testConfigArch builds a *Conf for the named architecture, failing
+// the test if the arch is not registered in testCtxts or is not
+// 64-bit (dummyTypes assumes 64-bit integers and pointers).
+func testConfigArch(tb testing.TB, arch string) *Conf {
+ ctxt, ok := testCtxts[arch]
+ if !ok {
+ tb.Fatalf("unknown arch %s", arch)
+ }
+ if ctxt.Arch.IntSize != 8 {
+ tb.Fatal("dummyTypes is 64-bit only")
+ }
+ c := &Conf{
+ config: NewConfig(arch, dummyTypes, ctxt, true),
+ tb: tb,
+ }
+ return c
+}
+
+// Conf is a test harness wrapper around *Config that also carries the
+// testing handle and a lazily constructed Frontend.
+type Conf struct {
+ config *Config
+ tb testing.TB
+ fe Frontend // lazily created by Frontend()
+}
+
+// Frontend returns this Conf's Frontend, constructing a DummyFrontend
+// on first use and caching it for subsequent calls.
+func (c *Conf) Frontend() Frontend {
+ if c.fe == nil {
+ c.fe = DummyFrontend{t: c.tb, ctxt: c.config.ctxt}
+ }
+ return c.fe
}
// DummyFrontend is a test-only frontend.
// It assumes 64 bit integers and pointers.
type DummyFrontend struct {
- t testing.TB
+ t testing.TB
+ ctxt *obj.Link // link context used by Syslook
}
// DummyAuto is a placeholder automatic (stack) variable for tests.
type DummyAuto struct {
}
// AllocFrame is a no-op: the dummy frontend does no frame layout.
func (DummyFrontend) AllocFrame(f *Func) {
}
-func (DummyFrontend) Syslook(s string) *obj.LSym {
- return obj.Linklookup(TestCtxt, s, 0)
+// Syslook looks up the named symbol in this frontend's own link
+// context rather than a package-global context.
+func (d DummyFrontend) Syslook(s string) *obj.LSym {
+ return obj.Linklookup(d.ctxt, s, 0)
}
func (DummyFrontend) UseWriteBarrier() bool {
return true // only writebarrier_test cares
// returns a fun containing the composed Func. entry must be a name
// supplied to one of the Bloc functions. Each of the bloc names and
// valu names should be unique across the Fun.
-func Fun(c *Config, fe Frontend, entry string, blocs ...bloc) fun {
- f := NewFunc(fe)
- f.Config = c
+func (c *Conf) Fun(entry string, blocs ...bloc) fun {
+ f := NewFunc(c.Frontend())
+ f.Config = c.config
// TODO: Either mark some SSA tests as t.Parallel,
// or set up a shared Cache and Reset it between tests.
// But not both.
func TestArgs(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
}
func TestEquiv(t *testing.T) {
+ cfg := testConfig(t)
equivalentCases := []struct{ f, g fun }{
// simple case
{
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Goto("exit")),
Bloc("exit",
Exit("mem"))),
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
},
// block order changed
{
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Goto("exit")),
Bloc("exit",
Exit("mem"))),
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("exit",
Exit("mem")),
Bloc("entry",
differentCases := []struct{ f, g fun }{
// different shape
{
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Goto("exit")),
Bloc("exit",
Exit("mem"))),
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Exit("mem"))),
},
// value order changed
{
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Valu("a", OpConst64, TypeInt64, 14, nil),
Exit("mem"))),
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 14, nil),
},
// value auxint different
{
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 14, nil),
Exit("mem"))),
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 26, nil),
},
// value aux different
{
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 0, 14),
Exit("mem"))),
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 0, 26),
},
// value args different
{
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Valu("sum", OpAdd64, TypeInt64, 0, nil, "a", "b"),
Exit("mem"))),
- Fun(testConfig(t), DummyFrontend{t}, "entry",
+ cfg.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 0, nil),
// TestConstCache ensures that the cache will not return
// reused free'd values with a non-matching AuxInt
func TestConstCache(t *testing.T) {
- f := Fun(testConfig(t), DummyFrontend{t}, "entry",
+ c := testConfig(t)
+ f := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Exit("mem")))
func TestFuseEliminatesOneBranch(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestFuseEliminatesBothBranches(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestFuseHandlesPhis(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestFuseEliminatesEmptyBlocks(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
b.ResetTimer()
for i := 0; i < b.N; i++ {
- fun := Fun(c, DummyFrontend{b}, "entry", blocks...)
+ fun := c.Fun("entry", blocks...)
fuse(fun.f)
}
})
func testLCAgen(t *testing.T, bg blockGen, size int) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry", bg(size)...)
+ fun := c.Fun("entry", bg(size)...)
CheckFunc(fun.f)
if size == 4 {
t.Logf(fun.f.String())
// done:
//
c := testConfigS390X(t)
- fe := DummyFrontend{t}
- fun := Fun(c, fe, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("SP", OpSP, TypeUInt64, 0, nil),
Valu("ret", OpAddr, TypeInt64Ptr, 0, nil, "SP"),
- Valu("N", OpArg, TypeInt64, 0, fe.Auto(TypeInt64)),
+ Valu("N", OpArg, TypeInt64, 0, c.Frontend().Auto(TypeInt64)),
Valu("starti", OpConst64, TypeInt64, 0, nil),
Valu("startsum", OpConst64, TypeInt64, 0, nil),
Goto("b1")),
)
c := testConfig(b)
- fun := Fun(c, DummyFrontend{b}, "entry", blocs...)
+ fun := c.Fun("entry", blocs...)
CheckFunc(fun.f)
b.SetBytes(int64(depth)) // helps for eyeballing linearity
func TestNilcheckSimple(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestNilcheckDomOrder(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestNilcheckAddr(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestNilcheckAddPtr(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestNilcheckPhi(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestNilcheckKeepRemove(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestNilcheckInFalseBranch(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestNilcheckUser(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestNilcheckBug(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func benchFnPass(b *testing.B, fn passFunc, size int, bg blockGen) {
b.ReportAllocs()
c := testConfig(b)
- fun := Fun(c, DummyFrontend{b}, "entry", bg(size)...)
+ fun := c.Fun("entry", bg(size)...)
CheckFunc(fun.f)
b.ResetTimer()
for i := 0; i < b.N; i++ {
func benchFnBlock(b *testing.B, fn passFunc, bg blockGen) {
b.ReportAllocs()
c := testConfig(b)
- fun := Fun(c, DummyFrontend{b}, "entry", bg(b.N)...)
+ fun := c.Fun("entry", bg(b.N)...)
CheckFunc(fun.f)
b.ResetTimer()
for i := 0; i < passCount; i++ {
func TestLiveControlOps(t *testing.T) {
c := testConfig(t)
- f := Fun(c, DummyFrontend{t}, "entry",
+ f := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("x", OpAMD64MOVLconst, TypeInt8, 1, nil),
func TestSchedule(t *testing.T) {
c := testConfig(t)
cases := []fun{
- Fun(c, DummyFrontend{t}, "entry",
+ c.Fun("entry",
Bloc("entry",
Valu("mem0", OpInitMem, TypeMem, 0, nil),
Valu("ptr", OpConst64, TypeInt64, 0xABCD, nil),
// In the function below, v2 depends on v3 and v4, v4 depends on v3, and v3 depends on store v5.
// storeOrder did not handle this case correctly.
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem0", OpInitMem, TypeMem, 0, nil),
Valu("a", OpAdd64, TypeInt64, 0, nil, "b", "c"), // v2
func TestShiftConstAMD64(t *testing.T) {
c := testConfig(t)
- fe := DummyFrontend{t}
- fun := makeConstShiftFunc(c, fe, 18, OpLsh64x64, TypeUInt64)
+ fun := makeConstShiftFunc(c, 18, OpLsh64x64, TypeUInt64)
checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SHLQconst: 1, OpAMD64CMPQconst: 0, OpAMD64ANDQconst: 0})
- fun = makeConstShiftFunc(c, fe, 66, OpLsh64x64, TypeUInt64)
+ fun = makeConstShiftFunc(c, 66, OpLsh64x64, TypeUInt64)
checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SHLQconst: 0, OpAMD64CMPQconst: 0, OpAMD64ANDQconst: 0})
- fun = makeConstShiftFunc(c, fe, 18, OpRsh64Ux64, TypeUInt64)
+ fun = makeConstShiftFunc(c, 18, OpRsh64Ux64, TypeUInt64)
checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SHRQconst: 1, OpAMD64CMPQconst: 0, OpAMD64ANDQconst: 0})
- fun = makeConstShiftFunc(c, fe, 66, OpRsh64Ux64, TypeUInt64)
+ fun = makeConstShiftFunc(c, 66, OpRsh64Ux64, TypeUInt64)
checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SHRQconst: 0, OpAMD64CMPQconst: 0, OpAMD64ANDQconst: 0})
- fun = makeConstShiftFunc(c, fe, 18, OpRsh64x64, TypeInt64)
+ fun = makeConstShiftFunc(c, 18, OpRsh64x64, TypeInt64)
checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SARQconst: 1, OpAMD64CMPQconst: 0})
- fun = makeConstShiftFunc(c, fe, 66, OpRsh64x64, TypeInt64)
+ fun = makeConstShiftFunc(c, 66, OpRsh64x64, TypeInt64)
checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SARQconst: 1, OpAMD64CMPQconst: 0})
}
-func makeConstShiftFunc(c *Config, fe Frontend, amount int64, op Op, typ Type) fun {
+func makeConstShiftFunc(c *Conf, amount int64, op Op, typ Type) fun {
ptyp := &TypeImpl{Size_: 8, Ptr: true, Name: "ptr"}
- fun := Fun(c, fe, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("SP", OpSP, TypeUInt64, 0, nil),
{8, OpLsh16x64, OpRsh16x64, TypeInt16},
}
c := testConfig(t)
- fe := DummyFrontend{t}
for _, tc := range tests {
- fun := makeShiftExtensionFunc(c, fe, tc.amount, tc.left, tc.right, tc.typ)
+ fun := makeShiftExtensionFunc(c, tc.amount, tc.left, tc.right, tc.typ)
checkOpcodeCounts(t, fun.f, ops)
}
}
// (rshift (lshift (Const64 [amount])) (Const64 [amount]))
//
// This may be equivalent to a sign or zero extension.
-func makeShiftExtensionFunc(c *Config, fe Frontend, amount int64, lshift, rshift Op, typ Type) fun {
+func makeShiftExtensionFunc(c *Conf, amount int64, lshift, rshift Op, typ Type) fun {
ptyp := &TypeImpl{Size_: 8, Ptr: true, Name: "ptr"}
- fun := Fun(c, fe, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("SP", OpSP, TypeUInt64, 0, nil),
func TestShortCircuit(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("arg1", OpArg, TypeInt64, 0, nil),
// Make sure writebarrier phase works even StoreWB ops are not in dependency order
c := testConfig(t)
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// See issue #19067.
c := testConfig(t)
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- fun := Fun(c, DummyFrontend{t}, "entry",
+ fun := c.Fun("entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),