extern volatile intgo runtime·MemProfileRate;
static MSpan* largealloc(uint32, uintptr*);
-static void profilealloc(void *v, uintptr size, uintptr typ);
+static void profilealloc(void *v, uintptr size);
static void settype(MSpan *s, void *v, uintptr typ);
// Allocate an object of at least size bytes.
runtime·racemalloc(v, size);
if(runtime·debug.allocfreetrace)
- goto profile;
+ runtime·tracealloc(v, size, typ);
if(!(flag & FlagNoProfiling) && (rate = runtime·MemProfileRate) > 0) {
if(size < rate && size < c->next_sample)
c->next_sample -= size;
- else {
- profile:
- profilealloc(v, size, typ);
- }
+ else
+ profilealloc(v, size);
}
m->locks--;
}
static void
-profilealloc(void *v, uintptr size, uintptr typ)
+profilealloc(void *v, uintptr size)
{
uintptr rate;
int32 next;
next = 0;
c->next_sample = next;
}
- runtime·MProf_Malloc(v, size, typ);
+ runtime·MProf_Malloc(v, size);
}
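
The sampling above records roughly one allocation per MemProfileRate bytes: small allocations only draw down the per-MCache next_sample budget, and only when the budget runs out (or the allocation is itself at least the rate) does profilealloc record a sample and reset the budget. A minimal standalone sketch of that idea, with illustrative names (should_sample, SampleRate) and a fixed reset where the runtime uses a randomized one:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

enum { SampleRate = 512*1024 };          /* sample ~once per 512 KB allocated */

static intptr_t next_sample = SampleRate;

/* Returns 1 if this allocation should be recorded in the profile. */
static int
should_sample(size_t size)
{
	if(size < SampleRate && (intptr_t)size < next_sample) {
		next_sample -= size;     /* small alloc: just consume budget */
		return 0;
	}
	next_sample = SampleRate;        /* the runtime randomizes this reset */
	return 1;
}

int
main(void)
{
	size_t sizes[] = {64, 4096, 300000, 64, 1<<20};
	int i;

	for(i = 0; i < 5; i++)
		printf("alloc %d -> sample=%d\n", (int)sizes[i], should_sample(sizes[i]));
	return 0;
}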
void*
if(size < TinySize)
runtime·throw("freeing too small block");
+ if(runtime·debug.allocfreetrace)
+ runtime·tracefree(v, size);
+
// Ensure that the span is swept.
// If we free into an unswept span, we will corrupt GC bitmaps.
runtime·MSpan_EnsureSwept(s);
void runtime·purgecachedstats(MCache*);
void* runtime·cnew(Type*);
void* runtime·cnewarray(Type*, intgo);
+void runtime·tracealloc(void*, uintptr, uintptr);
+void runtime·tracefree(void*, uintptr);
+void runtime·tracegc(void);
uintptr runtime·gettype(void*);
FlagNoInvokeGC = 1<<4, // don't invoke GC
};
-void runtime·MProf_Malloc(void*, uintptr, uintptr);
-void runtime·MProf_Free(Bucket*, void*, uintptr, bool);
+void runtime·MProf_Malloc(void*, uintptr);
+void runtime·MProf_Free(Bucket*, uintptr, bool);
void runtime·MProf_GC(void);
-void runtime·MProf_TraceGC(void);
int32 runtime·gcprocs(void);
void runtime·helpgc(int32 nproc);
void runtime·gchelper(void);
void runtime·memorydump(void);
int32 runtime·setgcpercent(int32);
+
+// Value we use to mark dead pointers when GODEBUG=gcdead=1.
+#define PoisonPtr ((uintptr)0x6969696969696969LL)
switch(bits) {
case BitsDead:
if(runtime·debug.gcdead)
- *(uintptr*)scanp = (uintptr)0x6969696969696969LL;
+ *(uintptr*)scanp = PoisonPtr;
break;
case BitsScalar:
break;
case BitsPointer:
p = *(byte**)scanp;
if(p != nil) {
- if(precise && p < (byte*)PageSize) {
+ if(precise && (p < (byte*)PageSize || (uintptr)p == PoisonPtr)) {
// Looks like a junk value in a pointer slot.
// Liveness analysis wrong?
m->traceback = 2;
continue;
}
+ if(runtime·debug.allocfreetrace)
+ runtime·tracefree(p, size);
+
// Clear mark and scan bits.
*bitp &= ~((bitScan|bitMarked)<<shift);
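
For reference, the junk-pointer test added above treats a precise pointer slot as suspect if it holds either a small integer (an address below the first, never-mapped page) or the gcdead poison pattern. A standalone sketch of just that predicate; the 4 KB page size here is an assumption for illustration:

#include <stdint.h>
#include <stdio.h>

#define PageSize   4096                              /* illustrative value */
#define PoisonPtr  ((uintptr_t)0x6969696969696969LL) /* gcdead poison */

/* A value found in a pointer slot looks like junk if it points into the
 * never-mapped first page or is exactly the poison pattern. */
static int
suspicious(uintptr_t p)
{
	return p < PageSize || p == PoisonPtr;
}

int
main(void)
{
	uintptr_t slots[] = {0, 42, PoisonPtr, (uintptr_t)&slots};
	int i;

	for(i = 0; i < 4; i++)
		printf("%p suspicious=%d\n", (void*)slots[i], suspicious(slots[i]));
	return 0;
}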
{
uint32 nproc;
+ m->traceback = 2;  // print full (GOTRACEBACK=2 style) tracebacks while helping the GC
gchelperstart();
// parallel mark over gc roots
nproc = work.nproc; // work.nproc can change right after we increment work.ndone
if(runtime·xadd(&work.ndone, +1) == nproc-1)
runtime·notewakeup(&work.alldone);
+ m->traceback = 0;
}
static void
m->gcing = 1;
runtime·stoptheworld();
- if(runtime·debug.allocfreetrace)
- runtime·MProf_TraceGC();
-
clearpools();
// Run gc on the g0 stack. We do this so that the g stack
uint32 i;
Eface eface;
+ if(runtime·debug.allocfreetrace)
+ runtime·tracegc();
+
+ m->traceback = 2;
t0 = args->start_time;
work.tstart = args->start_time;
runtime·shrinkstack(runtime·allg[i]);
runtime·MProf_GC();
+ m->traceback = 0;
}
extern uintptr runtime·sizeof_C_MStats;
runtime·unlock(&proflock);
}
-static int8*
-typeinfoname(int32 typeinfo)
-{
- if(typeinfo == TypeInfo_SingleObject)
- return "single object";
- else if(typeinfo == TypeInfo_Array)
- return "array";
- else if(typeinfo == TypeInfo_Chan)
- return "channel";
- runtime·throw("typinfoname: unknown type info");
- return nil;
-}
-
-static void
-printstackframes(uintptr *stk, int32 nstk)
-{
- String file;
- Func *f;
- int8 *name;
- uintptr pc;
- int32 frame;
- int32 line;
-
- for(frame = 0; frame < nstk; frame++) {
- pc = stk[frame];
- f = runtime·findfunc(pc);
- if(f != nil) {
- name = runtime·funcname(f);
- line = runtime·funcline(f, pc, &file);
- runtime·printf("\t#%d %p %s %S:%d\n", frame, pc, name, file, line);
- } else {
- runtime·printf("\t#%d %p\n", frame, pc);
- }
- }
-}
-
-// Called by collector to report a gc in allocfreetrace mode.
-void
-runtime·MProf_TraceGC(void)
-{
- uintptr stk[32];
- int32 nstk;
-
- nstk = runtime·callers(1, stk, nelem(stk));
- runtime·printf("MProf_TraceGC\n");
- printstackframes(stk, nstk);
-}
-
// Called by malloc to record a profiled block.
void
-runtime·MProf_Malloc(void *p, uintptr size, uintptr typ)
+runtime·MProf_Malloc(void *p, uintptr size)
{
uintptr stk[32];
Bucket *b;
- Type *type;
- int8 *name;
int32 nstk;
nstk = runtime·callers(1, stk, nelem(stk));
runtime·lock(&proflock);
- if(runtime·debug.allocfreetrace) {
- type = (Type*)(typ & ~3);
- name = typeinfoname(typ & 3);
- runtime·printf("MProf_Malloc(p=%p, size=%p, type=%p <%s", p, size, type, name);
- if(type != nil)
- runtime·printf(" of %S", *type->string);
- runtime·printf(">)\n");
- printstackframes(stk, nstk);
- }
b = stkbucket(MProf, size, stk, nstk, true);
b->recent_allocs++;
b->recent_alloc_bytes += size;
// Called when freeing a profiled block.
void
-runtime·MProf_Free(Bucket *b, void *p, uintptr size, bool freed)
+runtime·MProf_Free(Bucket *b, uintptr size, bool freed)
{
runtime·lock(&proflock);
if(freed) {
b->prev_frees++;
b->prev_free_bytes += size;
}
- if(runtime·debug.allocfreetrace) {
- runtime·printf("MProf_Free(p=%p, size=%p)\n", p, size);
- printstackframes(b->stk, b->nstk);
- }
runtime·unlock(&proflock);
}
runtime·starttheworld();
}
}
+
+// Tracing of alloc/free/gc.
+
+static Lock tracelock;
+
+static int8*
+typeinfoname(int32 typeinfo)
+{
+ if(typeinfo == TypeInfo_SingleObject)
+ return "single object";
+ else if(typeinfo == TypeInfo_Array)
+ return "array";
+ else if(typeinfo == TypeInfo_Chan)
+ return "channel";
+ runtime·throw("typeinfoname: unknown type info");
+ return nil;
+}
+
+void
+runtime·tracealloc(void *p, uintptr size, uintptr typ)
+{
+ int8 *name;
+ Type *type;
+
+ runtime·lock(&tracelock);
+ m->traceback = 2;
+ type = (Type*)(typ & ~3);
+ name = typeinfoname(typ & 3);
+ if(type == nil)
+ runtime·printf("tracealloc(%p, %p, %s)\n", p, size, name);
+ else
+ runtime·printf("tracealloc(%p, %p, %s of %S)\n", p, size, name, *type->string);
+ if(m->curg == nil || g == m->curg) {
+ runtime·goroutineheader(g);
+ runtime·traceback((uintptr)runtime·getcallerpc(&p), (uintptr)runtime·getcallersp(&p), 0, g);
+ } else {
+ runtime·goroutineheader(m->curg);
+ runtime·traceback(~(uintptr)0, ~(uintptr)0, 0, m->curg);
+ }
+ runtime·printf("\n");
+ m->traceback = 0;
+ runtime·unlock(&tracelock);
+}
+
+void
+runtime·tracefree(void *p, uintptr size)
+{
+ runtime·lock(&tracelock);
+ m->traceback = 2;
+ runtime·printf("tracefree(%p, %p)\n", p, size);
+ runtime·goroutineheader(g);
+ runtime·traceback((uintptr)runtime·getcallerpc(&p), (uintptr)runtime·getcallersp(&p), 0, g);
+ runtime·printf("\n");
+ m->traceback = 0;
+ runtime·unlock(&tracelock);
+}
+
+void
+runtime·tracegc(void)
+{
+ runtime·lock(&tracelock);
+ m->traceback = 2;
+ runtime·printf("tracegc()\n");
+ // running on m->g0 stack; show all non-g0 goroutines
+ runtime·tracebackothers(g);
+ runtime·printf("end tracegc\n");
+ runtime·printf("\n");
+ m->traceback = 0;
+ runtime·unlock(&tracelock);
+}
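
runtime·tracealloc receives the type information as a single word: Type values are at least word-aligned, so the low two bits of the pointer are free to carry the TypeInfo kind, which is what the (typ & ~3) / (typ & 3) split above unpacks. A small sketch of that encoding, using an illustrative packtype helper and a stand-in Type with a plain C string in place of the runtime's String:

#include <stdint.h>
#include <stdio.h>

enum { TypeInfo_SingleObject = 0, TypeInfo_Array = 1, TypeInfo_Chan = 2 };

typedef struct Type Type;            /* stand-in for the runtime's Type */
struct Type { const char *string; };

/* Pack a type pointer and its TypeInfo kind into one word; the kind
 * fits in the low two bits left free by the pointer's alignment. */
static uintptr_t
packtype(Type *t, int kind)
{
	return (uintptr_t)t | (uintptr_t)kind;
}

int
main(void)
{
	static Type t = { "[]int" };
	uintptr_t typ = packtype(&t, TypeInfo_Array);
	Type *type = (Type*)(typ & ~(uintptr_t)3);   /* same decode as tracealloc */
	int kind = typ & 3;

	printf("type=%s kind=%d\n", type->string, kind);
	return 0;
}

At run time these hooks fire only when the program is started with GODEBUG=allocfreetrace=1; the poison store above additionally requires GODEBUG=gcdead=1.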