package runtime
import (
	"internal/abi"
	"internal/runtime/sys"
)
// Batch type values for the alloc/free experiment.
const (
	traceAllocFreeTypesBatch = iota // Contains types. [{id, address, size, ptrspan, name length, name string} ...]
	traceAllocFreeInfoBatch         // Contains info for interpreting events. [min heap addr, page size, min heap align, min stack align]
)
func ( uintptr) {
assertWorldStopped()
var bool
:= unsafeTraceExpWriter(, nil, traceExperimentAllocFree)
, = .ensure(1 + 4*traceBytesPerNumber)
if {
.byte(byte(traceAllocFreeInfoBatch))
}
.varint(uint64(trace.minPageHeapAddr))
.varint(uint64(pageSize))
.varint(uint64(minHeapAlign))
.varint(uint64(fixedStack))
.flush().end()
:= traceAcquire()
if !.ok() {
throw("traceSnapshotMemory: tracing is not enabled")
}
for , := range mheap_.allspans {
if .state.get() == mSpanDead {
continue
}
.SpanExists()
if .state.get() != mSpanInUse {
continue
}
:= .allocBitsForIndex(0)
for := uintptr(0); < uintptr(.nelems); ++ {
if .index < uintptr(.freeindex) || .isMarked() {
:= .base() + *.elemsize
.HeapObjectExists(, .typePointersOfUnchecked().typ)
}
.advance()
}
}
forEachGRace(func( *g) {
.GoroutineStackExists(.stack.lo, .stack.hi-.stack.lo)
})
traceRelease()
}
func ( *mspan) traceArg {
if .state.get() == mSpanInUse {
return traceArg(.spanclass) << 1
}
return traceArg(1)
}
func ( traceLocker) ( *mspan) {
.eventWriter(traceGoRunning, traceProcRunning).event(traceEvSpan, traceSpanID(), traceArg(.npages), traceSpanTypeAndClass())
}
func ( traceLocker) ( *mspan) {
.eventWriter(traceGoRunning, traceProcRunning).event(traceEvSpanAlloc, traceSpanID(), traceArg(.npages), traceSpanTypeAndClass())
}
func ( traceLocker) ( *mspan) {
.eventWriter(traceGoRunning, traceProcRunning).event(traceEvSpanFree, traceSpanID())
}
func ( *mspan) traceArg {
return traceArg(uint64(.base())-trace.minPageHeapAddr) / pageSize
}
func ( traceLocker) ( uintptr, *abi.Type) {
.eventWriter(traceGoRunning, traceProcRunning).event(traceEvHeapObject, traceHeapObjectID(), .rtype())
}
func ( traceLocker) ( uintptr, *abi.Type) {
.eventWriter(traceGoRunning, traceProcRunning).event(traceEvHeapObjectAlloc, traceHeapObjectID(), .rtype())
}
func ( traceLocker) ( uintptr) {
.eventWriter(traceGoRunning, traceProcRunning).event(traceEvHeapObjectFree, traceHeapObjectID())
}
func ( uintptr) traceArg {
return traceArg(uint64()-trace.minPageHeapAddr) / minHeapAlign
}
func ( traceLocker) (, uintptr) {
:= traceCompressStackSize()
.eventWriter(traceGoRunning, traceProcRunning).event(traceEvGoroutineStack, traceGoroutineStackID(), )
}
func ( traceLocker) (, uintptr) {
:= traceCompressStackSize()
.eventWriter(traceGoRunning, traceProcRunning).event(traceEvGoroutineStackAlloc, traceGoroutineStackID(), )
}
func ( traceLocker) ( uintptr) {
.eventWriter(traceGoRunning, traceProcRunning).event(traceEvGoroutineStackFree, traceGoroutineStackID())
}
func ( uintptr) traceArg {
return traceArg(uint64()-trace.minPageHeapAddr) / fixedStack
}
func ( uintptr) traceArg {
if &(-1) != 0 {
throw("goroutine stack size is not a power of 2")
}
return traceArg(sys.Len64(uint64()))
}