An upcoming patch will have the memblock allocator make use of the node
fallback list information as well. Extract the related code from
build_zonelists() into a new helper, build_node_order(), so it can be reused.
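
For illustration only, a prospective memblock-side caller of the new helper
might look roughly like the sketch below; memblock_build_fallback_order() and
fallback_order[] are hypothetical names and not part of this patch:

static int fallback_order[MAX_NUMNODES];

/* Sketch: reuse build_node_order() to compute a per-node fallback list. */
static int memblock_build_fallback_order(int local_node)
{
        nodemask_t used_mask;
        int nr;

        nodes_clear(used_mask);
        nr = build_node_order(fallback_order, MAX_NUMNODES,
                              local_node, &used_mask);
        /* fallback_order[0..nr-1] now holds the nodes in preference order. */
        return nr;
}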

Signed-off-by: Pingfan Liu <kernelf...@gmail.com>
CC: Thomas Gleixner <t...@linutronix.de>
CC: Ingo Molnar <mi...@redhat.com>
CC: Borislav Petkov <b...@alien8.de>
CC: "H. Peter Anvin" <h...@zytor.com>
CC: Dave Hansen <dave.han...@linux.intel.com>
CC: Vlastimil Babka <vba...@suse.cz>
CC: Mike Rapoport <r...@linux.vnet.ibm.com>
CC: Andrew Morton <a...@linux-foundation.org>
CC: Mel Gorman <mgor...@suse.de>
CC: Joonsoo Kim <iamjoonsoo....@lge.com>
CC: Andy Lutomirski <l...@kernel.org>
CC: Andi Kleen <a...@linux.intel.com>
CC: Petr Tesarik <ptesa...@suse.cz>
CC: Michal Hocko <mho...@suse.com>
CC: Stephen Rothwell <s...@canb.auug.org.au>
CC: Jonathan Corbet <cor...@lwn.net>
CC: Nicholas Piggin <npig...@gmail.com>
CC: Daniel Vacek <ne...@redhat.com>
CC: linux-kernel@vger.kernel.org
---
 mm/page_alloc.c | 48 +++++++++++++++++++++++++++++-------------------
 1 file changed, 29 insertions(+), 19 deletions(-)

diff --git a/mm/page_alloc.c b/mm/page_alloc.c
index 35fdde0..a6967a1 100644
--- a/mm/page_alloc.c
+++ b/mm/page_alloc.c
@@ -5380,6 +5380,32 @@ static void build_thisnode_zonelists(pg_data_t *pgdat)
        zonerefs->zone_idx = 0;
 }
 
+int build_node_order(int *node_order_array, int sz,
+       int local_node, nodemask_t *used_mask)
+{
+       int node, nr_nodes = 0;
+       int prev_node = local_node;
+       int load = nr_online_nodes;
+
+
+       while ((node = find_next_best_node(local_node, used_mask)) >= 0
+               && nr_nodes < sz) {
+               /*
+                * We don't want to pressure a particular node.
+                * So adding penalty to the first node in same
+                * distance group to make it round-robin.
+                */
+               if (node_distance(local_node, node) !=
+                   node_distance(local_node, prev_node))
+                       node_load[node] = load;
+
+               node_order_array[nr_nodes++] = node;
+               prev_node = node;
+               load--;
+       }
+       return nr_nodes;
+}
+
 /*
  * Build zonelists ordered by zone and nodes within zones.
  * This results in conserving DMA zone[s] until all Normal memory is
@@ -5390,32 +5416,16 @@ static void build_thisnode_zonelists(pg_data_t *pgdat)
 static void build_zonelists(pg_data_t *pgdat)
 {
        static int node_order[MAX_NUMNODES];
-       int node, load, nr_nodes = 0;
+       int local_node, nr_nodes = 0;
        nodemask_t used_mask;
-       int local_node, prev_node;
 
        /* NUMA-aware ordering of nodes */
        local_node = pgdat->node_id;
-       load = nr_online_nodes;
-       prev_node = local_node;
        nodes_clear(used_mask);
 
        memset(node_order, 0, sizeof(node_order));
-       while ((node = find_next_best_node(local_node, &used_mask)) >= 0) {
-               /*
-                * We don't want to pressure a particular node.
-                * So adding penalty to the first node in same
-                * distance group to make it round-robin.
-                */
-               if (node_distance(local_node, node) !=
-                   node_distance(local_node, prev_node))
-                       node_load[node] = load;
-
-               node_order[nr_nodes++] = node;
-               prev_node = node;
-               load--;
-       }
-
+       nr_nodes = build_node_order(node_order, MAX_NUMNODES,
+               local_node, &used_mask);
        build_zonelists_in_node_order(pgdat, node_order, nr_nodes);
        build_thisnode_zonelists(pgdat);
 }
-- 
2.7.4
