Lines Matching refs:start_pfn

300 unsigned long start_pfn, end_pfn;
303 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid) {
304 unsigned long pages = end_pfn - start_pfn;
462 unsigned long start_pfn, end_pfn;
480 for_each_mem_pfn_range(i, nid, &start_pfn, &end_pfn, NULL) {
483 start_pfn = max(start_pfn, zone_movable_pfn[nid]);
484 if (start_pfn >= end_pfn)
488 if (start_pfn < usable_startpfn) {
491 - start_pfn;
510 start_pfn = usable_startpfn;
515 * start_pfn->end_pfn. Calculate size_pages as the
518 size_pages = end_pfn - start_pfn;
521 zone_movable_pfn[nid] = start_pfn + size_pages;
549 unsigned long start_pfn, end_pfn;
554 get_pfn_range_for_nid(nid, &start_pfn, &end_pfn);
602 unsigned long start_pfn, end_pfn;
608 nid = memblock_search_pfn_nid(pfn, &start_pfn, &end_pfn);
610 state->last_start = start_pfn;
751 unsigned long start_pfn = PFN_DOWN(start);
754 for (; start_pfn < end_pfn; start_pfn++) {
755 if (pfn_valid(start_pfn)) {
756 struct page *page = pfn_to_page(start_pfn);
758 init_reserved_page(start_pfn, nid);
850 unsigned long start_pfn, unsigned long zone_end_pfn,
854 unsigned long pfn, end_pfn = start_pfn + size;
872 if (start_pfn == altmap->base_pfn)
873 start_pfn += altmap->reserve;
878 for (pfn = start_pfn; pfn < end_pfn; ) {
911 unsigned long start_pfn,
919 start_pfn = clamp(start_pfn, zone_start_pfn, zone_end_pfn);
922 if (start_pfn >= end_pfn)
925 memmap_init_range(end_pfn - start_pfn, nid, zone_id, start_pfn,
928 if (*hole_pfn < start_pfn)
929 init_unavailable_range(*hole_pfn, start_pfn, zone_id, nid);
936 unsigned long start_pfn, end_pfn;
940 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid) {
949 memmap_init_zone_range(zone, start_pfn, end_pfn,
1065 unsigned long start_pfn,
1069 unsigned long pfn, end_pfn = start_pfn + nr_pages;
1086 start_pfn = altmap->base_pfn + vmem_altmap_offset(altmap);
1087 nr_pages = end_pfn - start_pfn;
1090 for (pfn = start_pfn; pfn < end_pfn; pfn += pfns_per_compound) {
1152 unsigned long start_pfn, end_pfn;
1155 for_each_mem_pfn_range(i, nid, &start_pfn, &end_pfn, NULL) {
1156 start_pfn = clamp(start_pfn, range_start_pfn, range_end_pfn);
1158 nr_absent -= end_pfn - start_pfn;
1165 * @start_pfn: The start PFN to start searching for holes
1170 unsigned long __init absent_pages_in_range(unsigned long start_pfn,
1173 return __absent_pages_in_range(MAX_NUMNODES, start_pfn, end_pfn);
1196 unsigned long start_pfn, end_pfn;
1200 start_pfn = clamp(memblock_region_memory_base_pfn(r),
1207 nr_absent += end_pfn - start_pfn;
1211 nr_absent += end_pfn - start_pfn;
1678 * @start_pfn: Passed by reference. On return, it will have the node start_pfn.
1686 unsigned long *start_pfn, unsigned long *end_pfn)
1691 *start_pfn = -1UL;
1695 *start_pfn = min(*start_pfn, this_start_pfn);
1699 if (*start_pfn == -1UL)
1700 *start_pfn = 0;
1706 unsigned long start_pfn = 0;
1712 get_pfn_range_for_nid(nid, &start_pfn, &end_pfn);
1715 pgdat->node_start_pfn = start_pfn;
1718 if (start_pfn != end_pfn) {
1720 (u64)start_pfn << PAGE_SHIFT,
1723 calculate_node_totalpages(pgdat, start_pfn, end_pfn);
1791 unsigned long start_pfn, end_pfn;
1801 start_pfn = PHYS_PFN(memblock_start_of_DRAM());
1813 end_pfn = max(max_zone_pfn[zone], start_pfn);
1814 arch_zone_lowest_possible_pfn[zone] = start_pfn;
1817 start_pfn = end_pfn;
1855 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid) {
1857 (u64)start_pfn << PAGE_SHIFT,
1859 subsection_map_init(start_pfn, end_pfn - start_pfn);
2104 deferred_init_maxorder(u64 *i, struct zone *zone, unsigned long *start_pfn,
2107 unsigned long mo_pfn = ALIGN(*start_pfn + 1, MAX_ORDER_NR_PAGES);
2108 unsigned long spfn = *start_pfn, epfn = *end_pfn;
2113 for_each_free_mem_pfn_range_in_zone_from(j, zone, start_pfn, end_pfn) {
2116 if (mo_pfn <= *start_pfn)
2120 nr_pages += deferred_init_pages(zone, *start_pfn, t);
2123 *start_pfn = mo_pfn;
2148 deferred_init_memmap_chunk(unsigned long start_pfn, unsigned long end_pfn,
2155 deferred_init_mem_pfn_range_in_zone(&i, zone, &spfn, &epfn, start_pfn);
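
The two helpers below are an illustrative sketch, not part of the matching lines above: they restate the recurring start_pfn/end_pfn patterns visible in the hits at 303-304 (summing memblock ranges per node) and 1152-1158 (clamping each range to a window and subtracting its pages to count holes). The function names sketch_total_pages() and sketch_absent_pages() are hypothetical; only for_each_mem_pfn_range(), clamp() and MAX_NUMNODES are taken from the matched code.

#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/minmax.h>

/* Sum the pages of every memblock memory range, as in lines 303-304. */
static unsigned long __init sketch_total_pages(void)
{
	unsigned long start_pfn, end_pfn, totalpages = 0;
	int i, nid;

	for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid)
		totalpages += end_pfn - start_pfn;

	return totalpages;
}

/*
 * Count PFNs in [range_start_pfn, range_end_pfn) that are not backed by
 * memblock memory: clamp each range to the window and subtract its pages,
 * as in lines 1152-1158.
 */
static unsigned long __init sketch_absent_pages(int nid,
		unsigned long range_start_pfn, unsigned long range_end_pfn)
{
	unsigned long nr_absent = range_end_pfn - range_start_pfn;
	unsigned long start_pfn, end_pfn;
	int i;

	for_each_mem_pfn_range(i, nid, &start_pfn, &end_pfn, NULL) {
		start_pfn = clamp(start_pfn, range_start_pfn, range_end_pfn);
		end_pfn = clamp(end_pfn, range_start_pfn, range_end_pfn);
		nr_absent -= end_pfn - start_pfn;
	}

	return nr_absent;
}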