)
func TestTraceAnnotations(t *testing.T) {
- testTraceProg(t, "annotations.go", func(t *testing.T, tb, _ []byte, _ bool) {
+ testTraceProg(t, "annotations.go", func(t *testing.T, tb, _ []byte, _ string) {
type evDesc struct {
kind trace.EventKind
task trace.TaskID
}
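A hedged sketch of a caller using the new callback signature, where the final argument is the GODEBUG value the program ran under rather than a stress flag; the test name and program are hypothetical and assume this sits alongside these tests so testTraceProg is in scope:

func TestTraceExample(t *testing.T) {
	testTraceProg(t, "example.go", func(t *testing.T, tb, stderr []byte, godebug string) {
		// Checks that depend on preemption behavior can key off the GODEBUG value.
		if strings.Contains(godebug, "asyncpreemptoff=1") {
			t.Log("asynchronous preemption was disabled for this run")
		}
	})
}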
func TestTraceCPUProfile(t *testing.T) {
- testTraceProg(t, "cpu-profile.go", func(t *testing.T, tb, stderr []byte, _ bool) {
+ testTraceProg(t, "cpu-profile.go", func(t *testing.T, tb, stderr []byte, _ string) {
// Parse stderr which has a CPU profile summary, if everything went well.
// (If it didn't, we shouldn't even make it here.)
scanner := bufio.NewScanner(bytes.NewReader(stderr))
}
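For reference, a minimal standalone sketch of the line-by-line scanning pattern referenced above; the "samples=" summary format is an assumption for illustration, not the test program's actual stderr output:

package main

import (
	"bufio"
	"bytes"
	"fmt"
	"strconv"
	"strings"
)

func main() {
	stderr := []byte("samples=40\nsamples=2\n") // hypothetical stderr contents
	scanner := bufio.NewScanner(bytes.NewReader(stderr))
	total := 0
	for scanner.Scan() {
		if v, ok := strings.CutPrefix(scanner.Text(), "samples="); ok {
			if n, err := strconv.Atoi(v); err == nil {
				total += n
			}
		}
	}
	if err := scanner.Err(); err != nil {
		fmt.Println("scan error:", err)
	}
	fmt.Println("total samples:", total) // 42
}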
func TestTraceFutileWakeup(t *testing.T) {
- testTraceProg(t, "futile-wakeup.go", func(t *testing.T, tb, _ []byte, _ bool) {
+ testTraceProg(t, "futile-wakeup.go", func(t *testing.T, tb, _ []byte, _ string) {
// Check to make sure that no goroutine in the "special" trace region
// ends up blocking, unblocking, then immediately blocking again.
//
}
func TestTraceStacks(t *testing.T) {
- testTraceProg(t, "stacks.go", func(t *testing.T, tb, _ []byte, stress bool) {
+ testTraceProg(t, "stacks.go", func(t *testing.T, tb, _ []byte, godebug string) {
type frame struct {
fn string
line int
{"runtime.GOMAXPROCS", 0},
{"main.main", 0},
}},
- {trace.EventStateTransition, "Goroutine Running->Runnable", []frame{
+ }
+ asyncPreemptOff := strings.Contains(godebug, "asyncpreemptoff=1")
+ if asyncPreemptOff {
+ // Only check for the syncPreemptPoint stack if asynchronous preemption
+ // is disabled. Otherwise the goroutine may only ever be preempted
+ // asynchronously, so this stack might never appear and the test would flake.
+ syncPreemptEv := evDesc{trace.EventStateTransition, "Goroutine Running->Runnable", []frame{
{"main.syncPreemptPoint", 0},
{"main.main.func12", 0},
- }},
+ }}
+ want = append(want, syncPreemptEv)
}
+ // Stress mode is indicated by traceadvanceperiod=0 in the GODEBUG string.
+ stress := strings.Contains(godebug, "traceadvanceperiod=0")
if !stress {
// Only check for this stack if !stress because traceAdvance alone could
// allocate enough memory to trigger a GC if called frequently enough.
testTraceProg(t, "iter-pull.go", nil)
}
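GODEBUG is a comma-separated list of key=value settings, which is why the simple strings.Contains checks above suffice to detect individual settings. A standalone illustration:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Example GODEBUG value combining two settings (illustrative only).
	godebug := "asyncpreemptoff=1,traceadvanceperiod=0"
	fmt.Println(strings.Contains(godebug, "asyncpreemptoff=1"))    // true: async preemption disabled
	fmt.Println(strings.Contains(godebug, "traceadvanceperiod=0")) // true: stress mode
}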
-func checkReaderDeterminism(t *testing.T, tb, _ []byte, _ bool) {
+func checkReaderDeterminism(t *testing.T, tb, _ []byte, _ string) {
events := func() []trace.Event {
var evs []trace.Event
}
}
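The body of the events closure is elided above; a hedged sketch of the event-collection pattern it implies, assuming the trace.NewReader/ReadEvent API used elsewhere in this package and keeping error handling minimal:

events := func() []trace.Event {
	var evs []trace.Event
	r, err := trace.NewReader(bytes.NewReader(tb))
	if err != nil {
		t.Fatal(err)
	}
	for {
		ev, err := r.ReadEvent()
		if err == io.EOF {
			break // end of trace
		}
		if err != nil {
			t.Fatal(err)
		}
		evs = append(evs, ev)
	}
	return evs
}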
-func testTraceProg(t *testing.T, progName string, extra func(t *testing.T, trace, stderr []byte, stress bool)) {
+func testTraceProg(t *testing.T, progName string, extra func(t *testing.T, trace, stderr []byte, godebug string)) {
testenv.MustHaveGoRun(t)
// Check if we're on a builder.
// Run some extra validation.
if !t.Failed() && extra != nil {
- extra(t, tb, errBuf.Bytes(), stress)
+ extra(t, tb, errBuf.Bytes(), godebug)
}
// Dump some more information on failure.
t.Run("Default", func(t *testing.T) {
runTest(t, false, "")
})
+ t.Run("AsyncPreemptOff", func(t *testing.T) {
+ runTest(t, false, "asyncpreemptoff=1")
+ })
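runTest's body is elided here; a plausible standalone sketch of how a per-subtest GODEBUG value reaches the traced child process (the helper name and path below are assumptions, not this file's actual code):

package main

import (
	"fmt"
	"os"
	"os/exec"
)

// runProg runs a trace test program with the given GODEBUG value and returns
// its combined output. Illustrative only.
func runProg(prog, godebug string) ([]byte, error) {
	cmd := exec.Command("go", "run", prog)
	// The child inherits the parent's environment plus the per-subtest GODEBUG.
	cmd.Env = append(os.Environ(), "GODEBUG="+godebug)
	return cmd.CombinedOutput()
}

func main() {
	out, err := runProg("./testdata/testprog/stacks.go", "asyncpreemptoff=1")
	if err != nil {
		fmt.Println("run failed:", err)
	}
	fmt.Printf("captured %d bytes of output\n", len(out))
}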
t.Run("Stress", func(t *testing.T) {
if testing.Short() {
t.Skip("skipping trace stress tests in short mode")