Lines Matching refs:size

30 	size_t size;
74 AllocPhysPages(size_t size)
76 size = ROUNDUP(size, B_PAGE_SIZE);
79 if (adr + size - (addr_t)gMemBase > gTotalMem)
82 gFreeMem = (uint8*)(adr + size);
96 FreePhysPages(phys_addr_t physAdr, size_t size)
98 if (physAdr + size == (phys_addr_t)gFreeMem)
99 gFreeMem -= size;
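
These matches read like the MMU setup of a Haiku-style boot loader. Lines 74-99 describe a bump allocator over one contiguous physical range: AllocPhysPages rounds the request up to whole pages, refuses requests that would run past the end of RAM, and advances a free-memory cursor; FreePhysPages can only reclaim a block if it was the most recent allocation. A minimal sketch of that pattern, reconstructed around the matched lines — gMemBase, gTotalMem, and gFreeMem are assumed to describe the managed range, and types and macros such as phys_addr_t, ROUNDUP, and B_PAGE_SIZE are taken as given from the loader's headers:

	static uint8* gMemBase;		// first byte of managed RAM (assumed)
	static size_t gTotalMem;	// size of managed RAM in bytes (assumed)
	static uint8* gFreeMem;		// bump cursor: next free byte (assumed)

	static phys_addr_t
	AllocPhysPages(size_t size)
	{
		size = ROUNDUP(size, B_PAGE_SIZE);
		phys_addr_t adr = (phys_addr_t)gFreeMem;

		// Refuse requests that would run past the end of RAM (line 79).
		if (adr + size - (addr_t)gMemBase > gTotalMem)
			return 0;

		gFreeMem = (uint8*)(adr + size);
		return adr;
	}

	static void
	FreePhysPages(phys_addr_t physAdr, size_t size)
	{
		// Only the most recent allocation can be handed back (lines 98-99).
		if (physAdr + size == (phys_addr_t)gFreeMem)
			gFreeMem -= size;
	}
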
104 AllocVirtPages(size_t size)
106 size = ROUNDUP(size, B_PAGE_SIZE);
108 gFreeVirtMem = adr + size;
115 FreeVirtPages(addr_t virtAdr, size_t size)
117 if (virtAdr + size == gFreeVirtMem)
118 gFreeVirtMem -= size;
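
The virtual-side pair at lines 104-118 mirrors this, except that it only moves a cursor through the loader's virtual window; no bounds check appears among the matches. A sketch under the same assumptions:

	static addr_t gFreeVirtMem;	// assumed cursor into the virtual window

	static addr_t
	AllocVirtPages(size_t size)
	{
		size = ROUNDUP(size, B_PAGE_SIZE);
		addr_t adr = gFreeVirtMem;
		gFreeVirtMem = adr + size;
		return adr;
	}

	static void
	FreeVirtPages(addr_t virtAdr, size_t size)
	{
		// Same last-allocation-only reclaim rule as the physical side.
		if (virtAdr + size == gFreeVirtMem)
			gFreeVirtMem -= size;
	}
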
185 MapRange(addr_t virtAdr, phys_addr_t physAdr, size_t size, uint64 flags)
188 B_PRIxADDR ", ", virtAdr, physAdr, size);
191 for (size_t i = 0; i < size; i += B_PAGE_SIZE)
194 ASSERT_ALWAYS(insert_virtual_allocated_range(virtAdr, size) >= B_OK);
199 MapRangeIdentity(addr_t adr, size_t size, uint64 flags)
201 MapRange(adr, adr, size, flags);
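
MapRange (lines 185-194) walks the range one page at a time and then records it via insert_virtual_allocated_range; MapRangeIdentity is just MapRange with virtAdr == physAdr. A sketch, assuming a hypothetical per-page Map(virt, phys, flags) helper that installs a single page-table entry:

	static void
	MapRange(addr_t virtAdr, phys_addr_t physAdr, size_t size, uint64 flags)
	{
		// One page-table entry per page; size is a multiple of B_PAGE_SIZE.
		for (size_t i = 0; i < size; i += B_PAGE_SIZE)
			Map(virtAdr + i, physAdr + i, flags);	// hypothetical helper

		// Record the range so the kernel knows it is already in use.
		ASSERT_ALWAYS(insert_virtual_allocated_range(virtAdr, size) >= B_OK);
	}

	static void
	MapRangeIdentity(addr_t adr, size_t size, uint64 flags)
	{
		MapRange(adr, adr, size, flags);
	}
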
209 range.start = AllocVirtPages(range.size);
211 MapRange(range.start, physAdr, range.size, flags);
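
Lines 209-211 show the usual composition of the two allocators: reserve a fresh virtual window, then map it onto an existing physical range. Wrapped in a hypothetical helper — the function shell and name are guessed, only the two inner lines come from the matches:

	static addr_t
	MapPhysMemory(phys_addr_t physAdr, size_t size, uint64 flags)
	{
		addr_range range;
		range.size = ROUNDUP(size, B_PAGE_SIZE);	// assumed setup
		range.start = AllocVirtPages(range.size);
		MapRange(range.start, physAdr, range.size, flags);
		return range.start;
	}
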
251 gKernelArgs.arch_args.physMap.size
252 = gKernelArgs.physical_memory_range[0].size;
254 - gKernelArgs.arch_args.physMap.size;
257 gKernelArgs.arch_args.physMap.size,
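
Lines 251-257 size the physMap window, a linear mapping of all RAM placed at the top of the kernel address space: its size is copied from the first physical memory range, and its start is derived by subtracting that size from the top of the address space. A hedged reconstruction, where KERNEL_TOP and kDefaultPageFlags are assumed names:

	gKernelArgs.arch_args.physMap.size
		= gKernelArgs.physical_memory_range[0].size;
	// Assumed: the window ends exactly at the top of the address space.
	gKernelArgs.arch_args.physMap.start = KERNEL_TOP + 1
		- gKernelArgs.arch_args.physMap.size;
	MapRange(gKernelArgs.arch_args.physMap.start,
		gKernelArgs.physical_memory_range[0].start,
		gKernelArgs.arch_args.physMap.size,
		kDefaultPageFlags /* assumed flag constant */);
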
272 MapRange(region->virtAdr, region->physAdr, region->size, flags.val);
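
Line 272 maps each bookkeeping region in turn, which implies a region record and a list to walk; the record's shape can be inferred from the fields used throughout the matches (the size member at line 30 is one of them). The match passes flags.val, suggesting a flags union — a plain uint64 stands in for it here:

	struct MemoryRegion {		// assumed shape, from the matched fields
		MemoryRegion*	next;
		addr_t			virtAdr;
		phys_addr_t		physAdr;
		size_t			size;
	};

	static MemoryRegion* sRegions;	// assumed list head

	static void
	MapAllRegions(uint64 flags)		// hypothetical wrapper for line 272
	{
		for (MemoryRegion* region = sRegions; region != NULL;
				region = region->next)
			MapRange(region->virtAdr, region->physAdr, region->size, flags);
	}
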
300 platform_allocate_region(void** address, size_t size, uint8 protection,
303 size = ROUNDUP(size, B_PAGE_SIZE);
312 region->physAdr = AllocPhysPages(size);
316 region->virtAdr = AllocVirtPages(size);
317 region->size = size;
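
platform_allocate_region (lines 300-317) ties the pieces together: round the size up to whole pages, take physical pages and a virtual window from the bump allocators, and record both in a MemoryRegion. A sketch — the error handling is assumed, and the protection and exactAddress parameters are left unhandled since the matches only show the allocation sequence:

	status_t
	platform_allocate_region(void** address, size_t size, uint8 protection,
		bool exactAddress)
	{
		size = ROUNDUP(size, B_PAGE_SIZE);

		MemoryRegion* region = new(std::nothrow) MemoryRegion;
		if (region == NULL)
			return B_NO_MEMORY;

		region->physAdr = AllocPhysPages(size);
		if (region->physAdr == 0) {
			delete region;
			return B_NO_MEMORY;
		}

		region->virtAdr = AllocVirtPages(size);
		region->size = size;
		region->next = sRegions;
		sRegions = region;

		*address = (void*)region->virtAdr;
		return B_OK;
	}
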
330 platform_free_region(void* address, size_t size)
342 FreePhysPages(region->physAdr, region->size);
343 FreeVirtPages(region->virtAdr, region->size);
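
platform_free_region (lines 330-343) reverses that: look the region up by address, return its pages to both allocators (which only truly reclaims them if they were the most recent allocations), and drop the record. A sketch over the same assumed list:

	status_t
	platform_free_region(void* address, size_t size)
	{
		// Find the owning region (lookup strategy assumed).
		MemoryRegion** link = &sRegions;
		while (*link != NULL && (*link)->virtAdr != (addr_t)address)
			link = &(*link)->next;

		MemoryRegion* region = *link;
		if (region == NULL)
			return B_ENTRY_NOT_FOUND;

		FreePhysPages(region->physAdr, region->size);
		FreeVirtPages(region->virtAdr, region->size);

		*link = region->next;
		delete region;
		return B_OK;
	}
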
379 && (phys_addr_t)address < region->physAdr + region->size))
395 && (phys_addr_t)address < region->virtAdr + region->size))
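
Lines 379 and 395 are the two directions of address translation over the same list: one scans for a physical address inside [physAdr, physAdr + size), the other for a virtual address inside [virtAdr, virtAdr + size). A sketch of the virtual-to-physical direction — the helper name is hypothetical, the range check is the matched one:

	static bool
	VirtToPhys(addr_t address, phys_addr_t& _physAdr)
	{
		for (MemoryRegion* region = sRegions; region != NULL;
				region = region->next) {
			if (address >= region->virtAdr
				&& address < region->virtAdr + region->size) {
				_physAdr = region->physAdr + (address - region->virtAdr);
				return true;
			}
		}
		return false;
	}
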
426 gKernelArgs.cpu_kstack[0].size = KERNEL_STACK_SIZE
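
The final match at line 426 sets the boot CPU's kernel stack size; the statement is cut off at a line break, and in this family of loaders the size typically also includes guard pages, with the start filled in from a fresh allocation. A speculative completion:

	// Assumed continuation of line 426: KERNEL_STACK_GUARD_PAGES and the
	// start assignment are guesses in the style of the surrounding code.
	gKernelArgs.cpu_kstack[0].size = KERNEL_STACK_SIZE
		+ KERNEL_STACK_GUARD_PAGES * B_PAGE_SIZE;
	gKernelArgs.cpu_kstack[0].start
		= AllocPhysPages(gKernelArgs.cpu_kstack[0].size);
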