func internal/runtime/atomic.Xadd64
19 uses
internal/runtime/atomic (current package)
atomic_amd64.go#L55: func Xadd64(ptr *uint64, delta int64) uint64
types.go#L344: return Xadd64(&u.value, delta)
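As the signature shows, Xadd64 is the runtime-internal atomic fetch-add: it atomically adds a signed delta to *ptr and returns the new value (the self-test in runtime1.go below checks exactly that post-add return). The types.go use wraps it as the Add method on the package's Uint64 type. A minimal sketch of the same semantics using the public sync/atomic package as a stand-in, since internal/runtime/atomic is not importable outside the runtime:

```go
package main

import (
	"fmt"
	"sync/atomic"
)

func main() {
	var n atomic.Uint64

	// Counterpart of: val := Xadd64(&n, 5) — add atomically and
	// get the new value back.
	fmt.Println(n.Add(5)) // 5

	// Xadd64 takes a signed int64 delta directly; the exported API
	// takes uint64, so a decrement is written as the two's
	// complement of the amount to subtract.
	fmt.Println(n.Add(^uint64(2 - 1))) // 3: subtracts 2
}
```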
runtime
arena.go#L898: atomic.Xadd64(&stats.largeFreeCount, 1)
arena.go#L899: atomic.Xadd64(&stats.largeFree, int64(s.elemsize))
arena.go#L1078: atomic.Xadd64(&stats.largeAlloc, int64(s.elemsize))
arena.go#L1079: atomic.Xadd64(&stats.largeAllocCount, 1)
mcache.go#L165: atomic.Xadd64(&stats.smallAllocCount[spc.sizeclass()], slotsUsed)
mcache.go#L169: atomic.Xadd64(&stats.tinyAllocCount, int64(c.tinyAllocs))
mcache.go#L242: atomic.Xadd64(&stats.largeAlloc, int64(npages*pageSize))
mcache.go#L243: atomic.Xadd64(&stats.largeAllocCount, 1)
mcache.go#L283: atomic.Xadd64(&stats.smallAllocCount[spanClass(i).sizeclass()], slotsUsed)
mcache.go#L310: atomic.Xadd64(&stats.tinyAllocCount, int64(c.tinyAllocs))
mgcsweep.go#L758: atomic.Xadd64(&stats.smallFreeCount[spc.sizeclass()], int64(nfreed))
mgcsweep.go#L793: atomic.Xadd64(&stats.largeFreeCount, 1)
mgcsweep.go#L794: atomic.Xadd64(&stats.largeFree, int64(size))
mgcwork.go#L275: atomic.Xadd64(&work.bytesMarked, int64(w.bytesMarked))
mstats.go#L658: val := atomic.Xadd64((*uint64)(s), n)
proc.go#L1980: atomic.Xadd64(&ncgocall, int64(mp.ncgocall))
runtime1.go#L128: if atomic.Xadd64(&test_z64, (1<<40)+1) != (2<<40)+2 {
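Nearly all of the runtime call sites above follow one pattern: lock-free accumulation of heap and GC statistics (alloc/free counts, byte totals, bytes marked) where each update is a single atomic add, and the returned value is only consulted where a check is needed (mstats.go). A minimal sketch of that accumulation pattern using the public sync/atomic package; heapStats and its fields here are illustrative stand-ins, not the runtime's actual types:

```go
package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

// heapStats mirrors the shape of the counters above: a count and a
// byte total per event kind, each bumped with a lone atomic add
// rather than under a shared lock. Field names are hypothetical.
type heapStats struct {
	largeAllocCount atomic.Uint64
	largeAllocBytes atomic.Uint64
}

func (s *heapStats) recordLargeAlloc(size uint64) {
	s.largeAllocCount.Add(1)
	s.largeAllocBytes.Add(size)
}

func main() {
	var stats heapStats
	var wg sync.WaitGroup

	// Many concurrent writers, no lock: the same update shape as the
	// mcache/mgcsweep call sites listed above.
	for i := 0; i < 8; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for j := 0; j < 1000; j++ {
				stats.recordLargeAlloc(4096)
			}
		}()
	}
	wg.Wait()

	fmt.Println(stats.largeAllocCount.Load(), stats.largeAllocBytes.Load())
	// Output: 8000 32768000
}
```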