In an upcoming patch, the memblock allocator will also make use of the
node fallback list information. Hence, extract the related code from
build_zonelists() into a reusable helper.

Signed-off-by: Pingfan Liu <kernelfans@xxxxxxxxx>
CC: Thomas Gleixner <tglx@xxxxxxxxxxxxx>
CC: Ingo Molnar <mingo@xxxxxxxxxx>
CC: Borislav Petkov <bp@xxxxxxxxx>
CC: "H. Peter Anvin" <hpa@xxxxxxxxx>
CC: Dave Hansen <dave.hansen@xxxxxxxxxxxxxxx>
CC: Vlastimil Babka <vbabka@xxxxxxx>
CC: Mike Rapoport <rppt@xxxxxxxxxxxxxxxxxx>
CC: Andrew Morton <akpm@xxxxxxxxxxxxxxxxxxxx>
CC: Mel Gorman <mgorman@xxxxxxx>
CC: Joonsoo Kim <iamjoonsoo.kim@xxxxxxx>
CC: Andy Lutomirski <luto@xxxxxxxxxx>
CC: Andi Kleen <ak@xxxxxxxxxxxxxxx>
CC: Petr Tesarik <ptesarik@xxxxxxx>
CC: Michal Hocko <mhocko@xxxxxxxx>
CC: Stephen Rothwell <sfr@xxxxxxxxxxxxxxxx>
CC: Jonathan Corbet <corbet@xxxxxxx>
CC: Nicholas Piggin <npiggin@xxxxxxxxx>
CC: Daniel Vacek <neelx@xxxxxxxxxx>
CC: linux-kernel@xxxxxxxxxxxxxxx
---
 mm/page_alloc.c | 48 +++++++++++++++++++++++++++++-------------------
 1 file changed, 29 insertions(+), 19 deletions(-)

diff --git a/mm/page_alloc.c b/mm/page_alloc.c
index 35fdde0..a6967a1 100644
--- a/mm/page_alloc.c
+++ b/mm/page_alloc.c
@@ -5380,6 +5380,32 @@ static void build_thisnode_zonelists(pg_data_t *pgdat)
 	zonerefs->zone_idx = 0;
 }
 
+int build_node_order(int *node_order_array, int sz,
+	int local_node, nodemask_t *used_mask)
+{
+	int node, nr_nodes = 0;
+	int prev_node = local_node;
+	int load = nr_online_nodes;
+
+
+	while ((node = find_next_best_node(local_node, used_mask)) >= 0
+			&& nr_nodes < sz) {
+		/*
+		 * We don't want to pressure a particular node.
+		 * So adding penalty to the first node in same
+		 * distance group to make it round-robin.
+		 */
+		if (node_distance(local_node, node) !=
+			node_distance(local_node, prev_node))
+			node_load[node] = load;
+
+		node_order_array[nr_nodes++] = node;
+		prev_node = node;
+		load--;
+	}
+	return nr_nodes;
+}
+
 /*
  * Build zonelists ordered by zone and nodes within zones.
  * This results in conserving DMA zone[s] until all Normal memory is
@@ -5390,32 +5416,16 @@ static void build_thisnode_zonelists(pg_data_t *pgdat)
 static void build_zonelists(pg_data_t *pgdat)
 {
 	static int node_order[MAX_NUMNODES];
-	int node, load, nr_nodes = 0;
+	int local_node, nr_nodes = 0;
 	nodemask_t used_mask;
-	int local_node, prev_node;
 
 	/* NUMA-aware ordering of nodes */
 	local_node = pgdat->node_id;
-	load = nr_online_nodes;
-	prev_node = local_node;
 	nodes_clear(used_mask);
 
 	memset(node_order, 0, sizeof(node_order));
-	while ((node = find_next_best_node(local_node, &used_mask)) >= 0) {
-		/*
-		 * We don't want to pressure a particular node.
-		 * So adding penalty to the first node in same
-		 * distance group to make it round-robin.
-		 */
-		if (node_distance(local_node, node) !=
-			node_distance(local_node, prev_node))
-			node_load[node] = load;
-
-		node_order[nr_nodes++] = node;
-		prev_node = node;
-		load--;
-	}
-
+	nr_nodes = build_node_order(node_order, MAX_NUMNODES,
+			local_node, &used_mask);
 	build_zonelists_in_node_order(pgdat, node_order, nr_nodes);
 	build_thisnode_zonelists(pgdat);
 }
-- 
2.7.4
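
For illustration, a minimal sketch of how a memblock-side caller in the coming
patch might consume build_node_order() to attempt allocations in NUMA fallback
order. It is not part of this series: the helper name alloc_across_nodes(),
the use of memblock_alloc_node() as found in current mainline, and the
assumption that build_node_order() gains a declaration visible outside
mm/page_alloc.c are all illustrative assumptions.

/*
 * Illustrative sketch only, not part of this patch. Assumes that
 * build_node_order() is declared in a shared header by a later patch
 * and that memblock_alloc_node() is available as in current mainline.
 */
#include <linux/cache.h>
#include <linux/memblock.h>
#include <linux/nodemask.h>
#include <linux/numa.h>

static void * __init alloc_across_nodes(phys_addr_t size, int local_node)
{
	static int node_order[MAX_NUMNODES];
	nodemask_t used_mask;
	int i, nr_nodes;
	void *ptr;

	/* Prefer memory on the local node first. */
	ptr = memblock_alloc_node(size, SMP_CACHE_BYTES, local_node);
	if (ptr)
		return ptr;

	/* Build the fallback list, nearest nodes first. */
	nodes_clear(used_mask);
	nr_nodes = build_node_order(node_order, MAX_NUMNODES,
				    local_node, &used_mask);

	/* Fall back to the remaining nodes in order of increasing distance. */
	for (i = 0; i < nr_nodes; i++) {
		ptr = memblock_alloc_node(size, SMP_CACHE_BYTES,
					  node_order[i]);
		if (ptr)
			return ptr;
	}
	return NULL;
}

Note that build_node_order() writes to node_load[] as a side effect of the
round-robin penalty, just as the open-coded loop in build_zonelists() did, so
any second user would inherit that behaviour.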