const internal/goarch.PtrSize

307 uses
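
PtrSize is the size of a pointer in bytes: unsafe.Sizeof(uintptr(0)), but as an ideal constant. It is also the size of the machine's native word, i.e. 4 on 32-bit systems and 8 on 64-bit ones.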

	internal/goarch (current package)
		goarch.go#L31: const PtrSize = 4 << (^uintptr(0) >> 63)
		goarch.go#L47: const Int64Align = PtrSize
		goarch_amd64.go#L12: 	_StackAlign          = PtrSize
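
The definition at goarch.go#L31 is a compile-time word-size probe: ^uintptr(0) is an all-ones value at the target's word width, so shifting it right by 63 bits yields 1 on 64-bit targets and 0 on 32-bit ones, making PtrSize 8 or 4 respectively, without importing unsafe. A minimal standalone sketch of the same trick:

	package main

	import "fmt"

	// PtrSize mirrors internal/goarch.PtrSize. On 32-bit targets
	// ^uintptr(0)>>63 shifts away every bit and yields 0 (Go shifts by
	// any count are well defined), so PtrSize is 4; on 64-bit targets
	// the shift yields 1 and PtrSize is 8.
	const PtrSize = 4 << (^uintptr(0) >> 63)

	func main() {
		fmt.Println(PtrSize) // 8 on amd64/arm64, 4 on 386/arm
	}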

	internal/abi
		abi.go#L75: 	if argSize > goarch.PtrSize || argSize == 0 || argSize&(argSize-1) != 0 {
		abi.go#L80: 		offset = goarch.PtrSize - argSize
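
The guard at abi.go#L75 admits only register-assignable scalars: argSize must be a non-zero power of two no larger than a pointer word, with argSize&(argSize-1) != 0 as the classic single-bit test; the companion line at #L80 then right-justifies a smaller argument within its register-sized slot. A sketch of the predicate (fitsIntReg is a hypothetical name, not the package's):

	package main

	import "fmt"

	const ptrSize = 4 << (^uintptr(0) >> 63)

	// fitsIntReg restates the abi.go#L75 check: x&(x-1) clears the
	// lowest set bit, so it is zero exactly when x has a single bit set;
	// sizes of 0 or wider than a pointer word are rejected.
	func fitsIntReg(argSize uintptr) bool {
		return argSize != 0 && argSize <= ptrSize && argSize&(argSize-1) == 0
	}

	func main() {
		for _, n := range []uintptr{0, 1, 2, 3, 4, 8, 12} {
			fmt.Println(n, fitsIntReg(n)) // true only for 1, 2, 4, 8 (64-bit)
		}
	}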

	internal/reflectlite
		swapper.go#L40: 		if size == goarch.PtrSize {
		value.go#L92: 	if v.typ.size != goarch.PtrSize || !v.typ.pointers() {

	reflect
		abi.go#L171: 		ok = a.assignIntN(0, goarch.PtrSize, 1, 0b1)
		abi.go#L180: 		ok = a.assignIntN(0, goarch.PtrSize, 1, 0b0)
		abi.go#L184: 		a.stackAssign(goarch.PtrSize, goarch.PtrSize)
		abi.go#L206: 		switch goarch.PtrSize {
		abi.go#L219: 		return a.assignIntN(offset, goarch.PtrSize, 2, 0b01)
		abi.go#L221: 		return a.assignIntN(offset, goarch.PtrSize, 2, 0b10)
		abi.go#L223: 		return a.assignIntN(offset, goarch.PtrSize, 3, 0b001)
		abi.go#L266: 	if ptrMap != 0 && size != goarch.PtrSize {
		abi.go#L417: 			spill += goarch.PtrSize
		abi.go#L434: 	spill = align(spill, goarch.PtrSize)
		abi.go#L439: 	retOffset := align(in.stackBytes, goarch.PtrSize)
		swapper.go#L40: 		if size == goarch.PtrSize {
		type.go#L1945: 		mt.keysize = uint8(goarch.PtrSize)
		type.go#L1951: 		mt.valuesize = uint8(goarch.PtrSize)
		type.go#L2252: 	size := bucketSize*(1+ktyp.size+etyp.size) + overflowPad + goarch.PtrSize
		type.go#L2258: 		nptr := (bucketSize*(1+ktyp.size+etyp.size) + goarch.PtrSize) / goarch.PtrSize
		type.go#L2260: 		base := bucketSize / goarch.PtrSize
		type.go#L2265: 		base += bucketSize * ktyp.size / goarch.PtrSize
		type.go#L2270: 		base += bucketSize * etyp.size / goarch.PtrSize
		type.go#L2271: 		base += overflowPad / goarch.PtrSize
		type.go#L2276: 		ptrdata = (word + 1) * goarch.PtrSize
		type.go#L2285: 		align:   goarch.PtrSize,
		type.go#L2309: 	ptrs := typ.ptrdata / goarch.PtrSize
		type.go#L2310: 	words := typ.size / goarch.PtrSize
		type.go#L2333: 	ptrs := typ.ptrdata / goarch.PtrSize
		type.go#L2780: 				n := (ft.offset() - off) / goarch.PtrSize
		type.go#L2957: 	case typ.kind&kindGCProg == 0 && array.size <= maxPtrmaskBytes*8*goarch.PtrSize:
		type.go#L2961: 		mask := make([]byte, (array.ptrdata/goarch.PtrSize+7)/8)
		type.go#L2971: 		elemPtrs := typ.ptrdata / goarch.PtrSize
		type.go#L2972: 		elemWords := typ.size / goarch.PtrSize
		type.go#L3084: 		align: goarch.PtrSize,
		type.go#L3089: 		size:    align(abi.retOffset+abi.ret.stackBytes, goarch.PtrSize),
		type.go#L3090: 		ptrdata: uintptr(abi.stackPtrs.n) * goarch.PtrSize,
		type.go#L3145: 		for bv.n < uint32(offset/uintptr(goarch.PtrSize)) {
		type.go#L3152: 		for bv.n < uint32(offset/uintptr(goarch.PtrSize)) {
		value.go#L97: 	if v.typ.size != goarch.PtrSize || !v.typ.pointers() {
		value.go#L540: 	frameSize = align(frameSize, goarch.PtrSize)
		value.go#L1056: 	methodFrameSize = align(methodFrameSize, goarch.PtrSize)
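
Several reflect lines above round frame offsets up to a pointer-word boundary (align(spill, goarch.PtrSize), align(frameSize, goarch.PtrSize)). For a power-of-two boundary this is the usual add-then-mask idiom; a self-contained sketch (reflect's align helper is unexported, so this restates the idiom rather than quoting it):

	package main

	import "fmt"

	// alignUp rounds x up to the next multiple of n, which must be a
	// power of two: adding n-1 carries past any partial boundary, and
	// &^ (n-1) clears the low bits back down to a multiple of n.
	func alignUp(x, n uintptr) uintptr {
		return (x + n - 1) &^ (n - 1)
	}

	func main() {
		fmt.Println(alignUp(13, 8)) // 16
		fmt.Println(alignUp(16, 8)) // 16
	}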

	runtime
		alg.go#L14: 	c0 = uintptr((8-goarch.PtrSize)/4*2860486313 + (goarch.PtrSize-4)/4*33054211828000289)
		alg.go#L15: 	c1 = uintptr((8-goarch.PtrSize)/4*3267000013 + (goarch.PtrSize-4)/4*23344194077549503)
		alg.go#L303: const hashRandomBytes = goarch.PtrSize / 4 * 64
		alg.go#L324: 	getRandomData((*[len(hashkey) * goarch.PtrSize]byte)(unsafe.Pointer(&hashkey))[:])
		cgocall.go#L491: 		p = *(*unsafe.Pointer)(add(p, goarch.PtrSize))
		cgocall.go#L571: 		for i = uintptr(0); i < n; i += goarch.PtrSize {
		cgocheck.go#L154: 	for i := uintptr(0); i < off+size; i += goarch.PtrSize {
		cgocheck.go#L172: 	skipMask := off / goarch.PtrSize / 8
		cgocheck.go#L173: 	skipBytes := skipMask * goarch.PtrSize * 8
		cgocheck.go#L179: 	for i := uintptr(0); i < size; i += goarch.PtrSize {
		cgocheck.go#L180: 		if i&(goarch.PtrSize*8-1) == 0 {
		cgocheck.go#L187: 			off -= goarch.PtrSize
		heapdump.go#L250: 			dumpint(uint64(offset + i*goarch.PtrSize))
		heapdump.go#L301: 		for off := child.argoff; off < child.argoff+child.arglen; off += goarch.PtrSize {
		heapdump.go#L310: 		for off := child.arglen; off < s.varp-s.sp; off += goarch.PtrSize {
		heapdump.go#L317: 		for off := s.varp - size - s.sp; off < s.varp-s.sp; off += goarch.PtrSize {
		heapdump.go#L324: 		dumpbv(&bv, s.varp-uintptr(bv.n)*goarch.PtrSize-s.sp)
		heapdump.go#L514: 	dumpint(goarch.PtrSize)
		heapdump.go#L729: 	nptr := size / goarch.PtrSize
		iface.go#L67: 	m = (*itab)(persistentalloc(unsafe.Sizeof(itab{})+uintptr(len(inter.mhdr)-1)*goarch.PtrSize, 0, &memstats.other_sys))
		iface.go#L104: 		p := (**itab)(add(unsafe.Pointer(&t.entries), h*goarch.PtrSize))
		iface.go#L137: 		t2 := (*itabTableType)(mallocgc((2+2*t.size)*goarch.PtrSize, nil, true))
		iface.go#L165: 		p := (**itab)(add(unsafe.Pointer(&t.entries), h*goarch.PtrSize))
		iface.go#L485: 		m := *(**itab)(add(unsafe.Pointer(&t.entries), i*goarch.PtrSize))
		malloc.go#L155: 	_NumStackOrders = 4 - goarch.PtrSize/4*goos.IsWindows - 1*goos.IsPlan9
		malloc.go#L262: 	heapArenaBitmapBytes = heapArenaBytes / (goarch.PtrSize * 8 / 2)
		malloc.go#L494: 	if goarch.PtrSize == 8 {
		malloc.go#L741: 			l2 = (*[1 << arenaL2Bits]*heapArena)(persistentalloc(unsafe.Sizeof(*l2), goarch.PtrSize, nil))
		malloc.go#L752: 		r = (*heapArena)(h.heapArenaAlloc.alloc(unsafe.Sizeof(*r), goarch.PtrSize, &memstats.gcMiscSys))
		malloc.go#L754: 			r = (*heapArena)(persistentalloc(unsafe.Sizeof(*r), goarch.PtrSize, &memstats.gcMiscSys))
		malloc.go#L762: 			size := 2 * uintptr(cap(h.allArenas)) * goarch.PtrSize
		malloc.go#L766: 			newArray := (*notInHeap)(persistentalloc(size, goarch.PtrSize, &memstats.gcMiscSys))
		malloc.go#L771: 			*(*notInHeapSlice)(unsafe.Pointer(&h.allArenas)) = notInHeapSlice{newArray, len(h.allArenas), int(size / goarch.PtrSize)}
		malloc.go#L1034: 			} else if goarch.PtrSize == 4 && size == 12 {
		malloc.go#L1453: 		persistent.off = alignUp(goarch.PtrSize, align)
		map.go#L107: 	noCheck = 1<<(8*goarch.PtrSize) - 1
		map.go#L186: 	return uintptr(1) << (b & (goarch.PtrSize*8 - 1))
		map.go#L196: 	top := uint8(hash >> (goarch.PtrSize*8 - 8))
		map.go#L209: 	return *(**bmap)(add(unsafe.Pointer(b), uintptr(t.bucketsize)-goarch.PtrSize))
		map.go#L213: 	*(**bmap)(add(unsafe.Pointer(b), uintptr(t.bucketsize)-goarch.PtrSize)) = ovf
		map.go#L826: 	if unsafe.Sizeof(hiter{})/goarch.PtrSize != 12 {
		map.go#L1296: 	if t.key.size > maxKeySize && (!t.indirectkey() || t.keysize != uint8(goarch.PtrSize)) ||
		map.go#L1300: 	if t.elem.size > maxElemSize && (!t.indirectelem() || t.elemsize != uint8(goarch.PtrSize)) ||
		map_fast32.go#L305: 			if goarch.PtrSize == 4 && t.key.ptrdata != 0 {
		map_fast32.go#L431: 				if goarch.PtrSize == 4 && t.key.ptrdata != 0 && writeBarrier.enabled {
		map_fast64.go#L304: 				if goarch.PtrSize == 8 {
		map_fast64.go#L434: 					if goarch.PtrSize == 8 {
		map_faststr.go#L30: 			for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
		map_faststr.go#L39: 					return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.elemsize))
		map_faststr.go#L46: 		for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
		map_faststr.go#L55: 				return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.elemsize))
		map_faststr.go#L72: 			k := (*stringStruct)(add(unsafe.Pointer(b), dataOffset+keymaybe*2*goarch.PtrSize))
		map_faststr.go#L74: 				return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+keymaybe*uintptr(t.elemsize))
		map_faststr.go#L95: 		for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
		map_faststr.go#L101: 				return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.elemsize))
		map_faststr.go#L125: 			for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
		map_faststr.go#L134: 					return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.elemsize)), true
		map_faststr.go#L141: 		for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
		map_faststr.go#L150: 				return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.elemsize)), true
		map_faststr.go#L167: 			k := (*stringStruct)(add(unsafe.Pointer(b), dataOffset+keymaybe*2*goarch.PtrSize))
		map_faststr.go#L169: 				return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+keymaybe*uintptr(t.elemsize)), true
		map_faststr.go#L190: 		for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
		map_faststr.go#L196: 				return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.elemsize)), true
		map_faststr.go#L249: 			k := (*stringStruct)(add(unsafe.Pointer(b), dataOffset+i*2*goarch.PtrSize))
		map_faststr.go#L287: 	insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*2*goarch.PtrSize)
		map_faststr.go#L293: 	elem := add(unsafe.Pointer(insertb), dataOffset+bucketCnt*2*goarch.PtrSize+inserti*uintptr(t.elemsize))
		map_faststr.go#L328: 		for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
		map_faststr.go#L338: 			e := add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.elemsize))
		map_faststr.go#L414: 		x.e = add(x.k, bucketCnt*2*goarch.PtrSize)
		map_faststr.go#L422: 			y.e = add(y.k, bucketCnt*2*goarch.PtrSize)
		map_faststr.go#L427: 			e := add(k, bucketCnt*2*goarch.PtrSize)
		map_faststr.go#L428: 			for i := 0; i < bucketCnt; i, k, e = i+1, add(k, 2*goarch.PtrSize), add(e, uintptr(t.elemsize)) {
		map_faststr.go#L454: 					dst.e = add(dst.k, bucketCnt*2*goarch.PtrSize)
		map_faststr.go#L467: 				dst.k = add(dst.k, 2*goarch.PtrSize)
		mbarrier.go#L204: 	if writeBarrier.needed && typ.ptrdata > off && size >= goarch.PtrSize {
		mbarrier.go#L205: 		if off&(goarch.PtrSize-1) != 0 {
		mbarrier.go#L208: 		pwsize := alignDown(size, goarch.PtrSize)
		mbarrier.go#L232: 	if writeBarrier.needed && typ != nil && typ.ptrdata != 0 && size >= goarch.PtrSize {
		mbitmap.go#L330: 	h.bitp = &ha.bitmap[(addr/(goarch.PtrSize*4))%heapArenaBitmapBytes]
		mbitmap.go#L331: 	h.shift = uint32((addr / goarch.PtrSize) & 3)
		mbitmap.go#L570: 	if (dst|src|size)&(goarch.PtrSize-1) != 0 {
		mbitmap.go#L605: 		for i := uintptr(0); i < size; i += goarch.PtrSize {
		mbitmap.go#L615: 		for i := uintptr(0); i < size; i += goarch.PtrSize {
		mbitmap.go#L638: 	if (dst|src|size)&(goarch.PtrSize-1) != 0 {
		mbitmap.go#L646: 	for i := uintptr(0); i < size; i += goarch.PtrSize {
		mbitmap.go#L666: 	word := maskOffset / goarch.PtrSize
		mbitmap.go#L671: 	for i := uintptr(0); i < size; i += goarch.PtrSize {
		mbitmap.go#L676: 				i += 7 * goarch.PtrSize
		mbitmap.go#L733: 	for i := uintptr(0); i < typ.ptrdata; i += goarch.PtrSize {
		mbitmap.go#L734: 		if i&(goarch.PtrSize*8-1) == 0 {
		mbitmap.go#L764: 	nw := (s.npages << _PageShift) / goarch.PtrSize
		mbitmap.go#L771: 	isPtrs := goarch.PtrSize == 8 && s.elemsize == goarch.PtrSize
		mbitmap.go#L849: 	if goarch.PtrSize == 8 && size == goarch.PtrSize {
		mbitmap.go#L875: 	if size == 2*goarch.PtrSize {
		mbitmap.go#L876: 		if typ.size == goarch.PtrSize {
		mbitmap.go#L885: 			if goarch.PtrSize == 4 && dataSize == goarch.PtrSize {
		mbitmap.go#L899: 			if typ.size != 2*goarch.PtrSize || typ.kind&kindGCProg != 0 {
		mbitmap.go#L906: 		hb |= bitScanAll & ((bitScan << (typ.ptrdata / goarch.PtrSize)) - 1)
		mbitmap.go#L912: 	} else if size == 3*goarch.PtrSize {
		mbitmap.go#L919: 			if goarch.PtrSize != 8 {
		mbitmap.go#L925: 			if typ.size == 2*goarch.PtrSize {
		mbitmap.go#L930: 		if typ.size == goarch.PtrSize {
		mbitmap.go#L1076: 		const maxBits = goarch.PtrSize*8 - 7
		mbitmap.go#L1077: 		if typ.ptrdata/goarch.PtrSize <= maxBits {
		mbitmap.go#L1088: 			nb = typ.ptrdata / goarch.PtrSize
		mbitmap.go#L1093: 			nb = typ.size / goarch.PtrSize
		mbitmap.go#L1104: 				for endnb <= goarch.PtrSize*8 {
		mbitmap.go#L1123: 			n := (typ.ptrdata/goarch.PtrSize+7)/8 - 1
		mbitmap.go#L1125: 			endnb = typ.size/goarch.PtrSize - n*8
		mbitmap.go#L1136: 		nw = typ.ptrdata / goarch.PtrSize
		mbitmap.go#L1141: 		nw = ((dataSize/typ.size-1)*typ.size + typ.ptrdata) / goarch.PtrSize
		mbitmap.go#L1304: 	nw = size / goarch.PtrSize
		mbitmap.go#L1338: 		cnw := size / goarch.PtrSize
		mbitmap.go#L1403: 		end := heapBitsForAddr(x + size - goarch.PtrSize)
		mbitmap.go#L1430: 		nptr := typ.ptrdata / goarch.PtrSize
		mbitmap.go#L1431: 		ndata := typ.size / goarch.PtrSize
		mbitmap.go#L1433: 		totalptr := ((count-1)*typ.size + typ.ptrdata) / goarch.PtrSize
		mbitmap.go#L1434: 		for i := uintptr(0); i < size/goarch.PtrSize; i++ {
		mbitmap.go#L1459: 				println("at word", i, "offset", i*goarch.PtrSize, "have", hex(have), "want", hex(want))
		mbitmap.go#L1490: 	if goarch.PtrSize == 8 && allocSize%(4*goarch.PtrSize) != 0 {
		mbitmap.go#L1497: 		if totalBits*goarch.PtrSize != progSize {
		mbitmap.go#L1512: 		if n := elemSize/goarch.PtrSize - progSize/goarch.PtrSize; n > 0 {
		mbitmap.go#L1534: 		n := elemSize / goarch.PtrSize
		mbitmap.go#L1558: 		totalBits = (elemSize*(count-1) + progSize) / goarch.PtrSize
		mbitmap.go#L1561: 	endAlloc := unsafe.Pointer(addb(h.bitp, allocSize/goarch.PtrSize/wordsPerBitmapByte))
		mbitmap.go#L1569: 	n := (size/goarch.PtrSize + 7) / 8
		mbitmap.go#L1704: 		const maxBits = goarch.PtrSize*8 - 7
		mbitmap.go#L1757: 					for nb <= goarch.PtrSize*8 {
		mbitmap.go#L1885: 	bitmapBytes := divRoundUp(ptrdata, 8*goarch.PtrSize)
		mbitmap.go#L1958: 	nptr := typ.ptrdata / goarch.PtrSize
		mbitmap.go#L1978: 			mask = make([]byte, n/goarch.PtrSize)
		mbitmap.go#L1979: 			for i := uintptr(0); i < n; i += goarch.PtrSize {
		mbitmap.go#L1980: 				off := (uintptr(p) + i - datap.data) / goarch.PtrSize
		mbitmap.go#L1981: 				mask[i/goarch.PtrSize] = (*addb(bitmap, off/8) >> (off % 8)) & 1
		mbitmap.go#L1990: 			mask = make([]byte, n/goarch.PtrSize)
		mbitmap.go#L1991: 			for i := uintptr(0); i < n; i += goarch.PtrSize {
		mbitmap.go#L1992: 				off := (uintptr(p) + i - datap.bss) / goarch.PtrSize
		mbitmap.go#L1993: 				mask[i/goarch.PtrSize] = (*addb(bitmap, off/8) >> (off % 8)) & 1
		mbitmap.go#L2003: 		mask = make([]byte, n/goarch.PtrSize)
		mbitmap.go#L2004: 		for i := uintptr(0); i < n; i += goarch.PtrSize {
		mbitmap.go#L2006: 				mask[i/goarch.PtrSize] = 1
		mbitmap.go#L2009: 				mask = mask[:i/goarch.PtrSize]
		mbitmap.go#L2028: 			size := uintptr(locals.n) * goarch.PtrSize
		mbitmap.go#L2030: 			mask = make([]byte, n/goarch.PtrSize)
		mbitmap.go#L2031: 			for i := uintptr(0); i < n; i += goarch.PtrSize {
		mbitmap.go#L2032: 				off := (uintptr(p) + i - frame.varp + size) / goarch.PtrSize
		mbitmap.go#L2033: 				mask[i/goarch.PtrSize] = locals.ptrbit(off)
		mcheckmark.go#L27: type checkmarksMap [heapArenaBytes / goarch.PtrSize / 8]uint8
		mfinal.go#L29: 	fin     [(_FinBlockSize - 2*goarch.PtrSize - 2*4) / unsafe.Sizeof(finalizer{})]finalizer
		mfinal.go#L36: var finptrmask [_FinBlockSize / goarch.PtrSize / 8]byte
		mfinal.go#L98: 				if (unsafe.Sizeof(finalizer{}) != 5*goarch.PtrSize ||
		mfinal.go#L100: 					unsafe.Offsetof(finalizer{}.arg) != goarch.PtrSize ||
		mfinal.go#L101: 					unsafe.Offsetof(finalizer{}.nret) != 2*goarch.PtrSize ||
		mfinal.go#L102: 					unsafe.Offsetof(finalizer{}.fint) != 3*goarch.PtrSize ||
		mfinal.go#L103: 					unsafe.Offsetof(finalizer{}.ot) != 4*goarch.PtrSize) {
		mfinal.go#L429: 	nret = alignUp(nret, goarch.PtrSize)
		mgcmark.go#L268: 	if rootBlockBytes%(8*goarch.PtrSize) != 0 {
		mgcmark.go#L281: 	ptrmask := (*uint8)(add(unsafe.Pointer(ptrmask0), uintptr(shard)*(rootBlockBytes/(8*goarch.PtrSize))))
		mgcmark.go#L396: 				scanblock(uintptr(unsafe.Pointer(&spf.fn)), goarch.PtrSize, &oneptrmask[0], gcw, nil)
		mgcmark.go#L775: 		scanblock(uintptr(unsafe.Pointer(&gp.sched.ctxt)), goarch.PtrSize, &oneptrmask[0], gcw, &state)
		mgcmark.go#L793: 			scanblock(uintptr(unsafe.Pointer(&d.fn)), goarch.PtrSize, &oneptrmask[0], gcw, &state)
		mgcmark.go#L798: 			scanblock(uintptr(unsafe.Pointer(&d.link)), goarch.PtrSize, &oneptrmask[0], gcw, &state)
		mgcmark.go#L804: 			scanblock(uintptr(unsafe.Pointer(&d)), goarch.PtrSize, &oneptrmask[0], gcw, &state)
		mgcmark.go#L949: 		size := uintptr(locals.n) * goarch.PtrSize
		mgcmark.go#L955: 		scanblock(frame.argp, uintptr(args.n)*goarch.PtrSize, args.bytedata, gcw, state)
		mgcmark.go#L1205: 		bits := uint32(*addb(ptrmask, i/(goarch.PtrSize*8)))
		mgcmark.go#L1207: 			i += goarch.PtrSize * 8
		mgcmark.go#L1223: 			i += goarch.PtrSize
		mgcmark.go#L1290: 	for i = 0; i < n; i, hbits = i+goarch.PtrSize, hbits.next() {
		mgcmark.go#L1339: 				word := (p - b) / goarch.PtrSize
		mgcmark.go#L1364: 	for i := uintptr(0); i < n; i += goarch.PtrSize {
		mgcmark.go#L1366: 			word := i / goarch.PtrSize
		mgcmark.go#L1375: 				if i%(goarch.PtrSize*8) != 0 {
		mgcmark.go#L1378: 				i += goarch.PtrSize*8 - goarch.PtrSize
		mgcmark.go#L1440: 	if obj&(goarch.PtrSize-1) != 0 {
		mgcmark.go#L1512: 		size = off + goarch.PtrSize
		mgcmark.go#L1514: 	for i := uintptr(0); i < size; i += goarch.PtrSize {
		mgcmark.go#L1518: 		if !(i < 128*goarch.PtrSize || off-16*goarch.PtrSize < i && i < off+16*goarch.PtrSize) {
		mgcstack.go#L110: 	obj [(_WorkbufSize - unsafe.Sizeof(stackWorkBufHdr{})) / goarch.PtrSize]uintptr
		mgcwork.go#L326: 	obj [(_WorkbufSize - unsafe.Sizeof(workbufhdr{})) / goarch.PtrSize]uintptr
		mheap.go#L497: 		n := 64 * 1024 / goarch.PtrSize
		mheap.go#L503: 		sp.array = sysAlloc(uintptr(n)*goarch.PtrSize, &memstats.other_sys)
		mheap.go#L1836: 			scanblock(uintptr(unsafe.Pointer(&s.fn)), goarch.PtrSize, &oneptrmask[0], gcw, nil)
		mranges.go#L170: 	ranges.array = (*notInHeap)(persistentalloc(unsafe.Sizeof(addrRange{})*uintptr(ranges.cap), goarch.PtrSize, sysStat))
		mranges.go#L297: 			ranges.array = (*notInHeap)(persistentalloc(unsafe.Sizeof(addrRange{})*uintptr(ranges.cap), goarch.PtrSize, a.sysStat))
		mranges.go#L367: 		ranges.array = (*notInHeap)(persistentalloc(unsafe.Sizeof(addrRange{})*uintptr(ranges.cap), goarch.PtrSize, b.sysStat))
		mspanset.go#L85: 		blockp := add(spine, goarch.PtrSize*top)
		mspanset.go#L105: 			newSpine := persistentalloc(newCap*goarch.PtrSize, cpu.CacheLineSize, &memstats.gcMiscSys)
		mspanset.go#L109: 				memmove(newSpine, b.spine, b.spineCap*goarch.PtrSize)
		mspanset.go#L127: 		blockp := add(b.spine, goarch.PtrSize*top)
		mspanset.go#L184: 	blockp := add(spine, goarch.PtrSize*uintptr(top))
		mspanset.go#L244: 		blockp := (**spanSetBlock)(add(b.spine, goarch.PtrSize*uintptr(top)))
		mstats.go#L716: 	_ [(goarch.PtrSize / 4) % 2]uint32
		mwbbuf.go#L148: 	b.next += 2 * goarch.PtrSize
		os_linux.go#L230: 	auxv := (*[1 << 28]uintptr)(add(unsafe.Pointer(argv), uintptr(n)*goarch.PtrSize))
		preempt.go#L323: 	asyncPreemptStack = uintptr(total) + 8*goarch.PtrSize
		print.go#L274: 	for i := uintptr(0); p+i < end; i += goarch.PtrSize {
		proc.go#L155: 	if goarch.PtrSize == 8 {
		proc.go#L568: 	return *(**g)(add(unsafe.Pointer(ptr), i*goarch.PtrSize))
		proc.go#L1899: 	gp.sched.sp -= 4 * goarch.PtrSize // extra space in case of reads slightly beyond frame
		proc.go#L4094: 	totalSize := uintptr(4*goarch.PtrSize + sys.MinFrameSize) // extra space in case of reads slightly beyond frame
		proc.go#L6197: 			p := add(unsafe.Pointer(t), (3+i)*goarch.PtrSize)
		proc.go#L6218: 		firstFunc := add(unsafe.Pointer(t), (3+t.ndeps)*goarch.PtrSize)
		proc.go#L6220: 			p := add(firstFunc, i*goarch.PtrSize)
		runtime1.go#L58: 	return *(**byte)(add(unsafe.Pointer(argv), uintptr(i)*goarch.PtrSize))
		runtime1.go#L193: 	if unsafe.Sizeof(k) != goarch.PtrSize {
		runtime1.go#L196: 	if unsafe.Sizeof(l) != goarch.PtrSize {
		runtime2.go#L510: 	tlsSize  = tlsSlots * goarch.PtrSize
		runtime2.go#L919: 		for i := 0; i < goarch.PtrSize && n < len(r); i++ {
		signal_amd64.go#L83: 	sp -= goarch.PtrSize
		signal_linux_amd64.go#L55: 	*(*uintptr)(add(unsafe.Pointer(c.info), 2*goarch.PtrSize)) = uintptr(x)
		slice.go#L226: 	case et.size == goarch.PtrSize:
		slice.go#L227: 		lenmem = uintptr(old.len) * goarch.PtrSize
		slice.go#L228: 		newlenmem = uintptr(cap) * goarch.PtrSize
		slice.go#L229: 		capmem = roundupsize(uintptr(newcap) * goarch.PtrSize)
		slice.go#L230: 		overflow = uintptr(newcap) > maxAlloc/goarch.PtrSize
		slice.go#L231: 		newcap = int(capmem / goarch.PtrSize)
		slice.go#L234: 		if goarch.PtrSize == 8 {
		stack.go#L72: 	_StackSystem = goos.IsWindows*512*goarch.PtrSize + goos.IsPlan9*512 + goos.IsIos*goarch.IsArm64*1024
		stack.go#L130: 	uintptrMask = 1<<(8*goarch.PtrSize) - 1
		stack.go#L613: 				print("        ", add(scanp, (i+j)*goarch.PtrSize), ":", ptrnames[bv.ptrbit(i+j)], ":", hex(*(*uintptr)(add(scanp, (i+j)*goarch.PtrSize))), " # ", i, " ", *addb(bv.bytedata, i/8), "\n")
		stack.go#L620: 			pp := (*uintptr)(add(scanp, (i+j)*goarch.PtrSize))
		stack.go#L669: 		size := uintptr(locals.n) * goarch.PtrSize
		stack.go#L675: 	if goarch.ArchFamily == goarch.AMD64 && frame.argp-frame.varp == 2*goarch.PtrSize {
		stack.go#L725: 			for i := uintptr(0); i < ptrdata; i += goarch.PtrSize {
		stack.go#L726: 				if *addb(gcdata, i/(8*goarch.PtrSize))>>(i/goarch.PtrSize&7)&1 != 0 {
		stack.go#L1035: 		sp -= goarch.PtrSize
		stack.go#L1313: 			n := int32(frame.arglen / goarch.PtrSize)
		stack.go#L1345: 			p = add(p, goarch.PtrSize)
		symtab.go#L602: 		hdr.minLC != sys.PCQuantum || hdr.ptrSize != goarch.PtrSize || hdr.textStart != datap.text {
		symtab.go#L870: 	return (targetpc / goarch.PtrSize) % uintptr(len(pcvalueCache{}.entries))
		symtab.go#L1039: 	if debugPcln && x&(goarch.PtrSize-1) != 0 {
		sys_x86.go#L18: 	sp -= goarch.PtrSize
		trace.go#L846: 	return (*traceStack)(tab.mem.alloc(unsafe.Sizeof(traceStack{}) + uintptr(n)*goarch.PtrSize))
		trace.go#L946: 	data [64<<10 - goarch.PtrSize]byte
		trace.go#L957: 	n = alignUp(n, goarch.PtrSize)
		traceback.go#L95: 			frame.sp += goarch.PtrSize
		traceback.go#L190: 				frame.fp += goarch.PtrSize
		traceback.go#L231: 					lrPtr = frame.fp - goarch.PtrSize
		traceback.go#L262: 			frame.varp -= goarch.PtrSize
		traceback.go#L283: 			frame.varp -= goarch.PtrSize
		traceback.go#L708: 				retValid = *(*bool)(unsafe.Pointer(arg0 + 4*goarch.PtrSize))
		traceback.go#L715: 			arglen = uintptr(bv.n * goarch.PtrSize)
		traceback.go#L717: 				arglen = uintptr(mv.argLen) &^ (goarch.PtrSize - 1)
		traceback.go#L1060: 	const expand = 32 * goarch.PtrSize
		traceback.go#L1061: 	const maxExpand = 256 * goarch.PtrSize
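
A recurring idiom in the runtime lines above is branch-free constant selection keyed on the word size, as in alg.go#L14-15: with PtrSize equal to 4 or 8, exactly one of the factors (8-PtrSize)/4 and (PtrSize-4)/4 is 1 and the other is 0, so the sum picks the 32-bit or the 64-bit hash constant at compile time, and the deselected constant is multiplied by zero before the uintptr conversion could reject it on 32-bit targets. A sketch:

	package main

	import "fmt"

	const ptrSize = 4 << (^uintptr(0) >> 63)

	// c0 restates alg.go#L14: on 32-bit targets (8-ptrSize)/4 == 1
	// selects 2860486313; on 64-bit targets (ptrSize-4)/4 == 1 selects
	// 33054211828000289. The other term is 0*constant, so the whole
	// expression always fits in uintptr.
	const c0 = uintptr((8-ptrSize)/4*2860486313 + (ptrSize-4)/4*33054211828000289)

	func main() {
		fmt.Println(c0) // 33054211828000289 on 64-bit targets
	}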

	runtime/internal/math
		math.go#L14: 	if a|b < 1<<(4*goarch.PtrSize) || a == 0 {
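
The guard at math.go#L14 is an overflow fast path: if a|b has no bits at or above position 4*PtrSize, both factors are below 2^(PtrSize*4), i.e. half the word width, so their product cannot overflow a uintptr. A sketch of the full check, with the slow path swapped for math/bits (illustrative, not the package's actual implementation):

	package main

	import (
		"fmt"
		"math/bits"
	)

	const ptrSize = 4 << (^uintptr(0) >> 63)

	// mulUintptr multiplies a and b and reports whether the product
	// overflowed. Fast path as in math.go#L14: when both operands fit
	// in half a word (or a is 0), overflow is impossible.
	func mulUintptr(a, b uintptr) (uintptr, bool) {
		if a|b < 1<<(4*ptrSize) || a == 0 {
			return a * b, false
		}
		hi, lo := bits.Mul(uint(a), uint(b))
		return uintptr(lo), hi != 0
	}

	func main() {
		fmt.Println(mulUintptr(1<<20, 1<<20)) // 1099511627776 false
		fmt.Println(mulUintptr(1<<40, 1<<40)) // 0 true (on 64-bit)
	}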

	runtime/internal/sys
		consts.go#L23: const Int64Align = goarch.PtrSize