From: chenqiwu <chenqiwu@xxxxxxxxxx>

1) Simplify the initialization of some local variables.
2) Constify the LRU isolation mode.
3) Define a page_zoneidx variable to avoid redundant page_zonenum() calls.

Signed-off-by: chenqiwu <chenqiwu@xxxxxxxxxx>
---
 mm/vmscan.c | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/mm/vmscan.c b/mm/vmscan.c
index b06868f..2f65a91 100644
--- a/mm/vmscan.c
+++ b/mm/vmscan.c
@@ -1638,16 +1638,15 @@ static unsigned long isolate_lru_pages(unsigned long nr_to_scan,
 	struct list_head *src = &lruvec->lists[lru];
 	unsigned long nr_taken = 0;
 	unsigned long nr_zone_taken[MAX_NR_ZONES] = { 0 };
-	unsigned long nr_skipped[MAX_NR_ZONES] = { 0, };
+	unsigned long nr_skipped[MAX_NR_ZONES] = { 0 };
 	unsigned long skipped = 0;
-	unsigned long scan, total_scan, nr_pages;
+	unsigned long scan = 0, total_scan = 0, nr_pages;
 	LIST_HEAD(pages_skipped);
-	isolate_mode_t mode = (sc->may_unmap ? 0 : ISOLATE_UNMAPPED);
+	const isolate_mode_t mode = (sc->may_unmap ? 0 : ISOLATE_UNMAPPED);
 
-	total_scan = 0;
-	scan = 0;
 	while (scan < nr_to_scan && !list_empty(src)) {
 		struct page *page;
+		enum zone_type page_zoneidx;
 
 		page = lru_to_page(src);
 		prefetchw_prev_lru_page(page, src, flags);
@@ -1657,9 +1656,10 @@ static unsigned long isolate_lru_pages(unsigned long nr_to_scan,
 		nr_pages = compound_nr(page);
 		total_scan += nr_pages;
 
-		if (page_zonenum(page) > sc->reclaim_idx) {
+		page_zoneidx = page_zonenum(page);
+		if (page_zoneidx > sc->reclaim_idx) {
 			list_move(&page->lru, &pages_skipped);
-			nr_skipped[page_zonenum(page)] += nr_pages;
+			nr_skipped[page_zoneidx] += nr_pages;
 			continue;
 		}
 
@@ -1677,7 +1677,7 @@ static unsigned long isolate_lru_pages(unsigned long nr_to_scan,
 		switch (__isolate_lru_page(page, mode)) {
 		case 0:
 			nr_taken += nr_pages;
-			nr_zone_taken[page_zonenum(page)] += nr_pages;
+			nr_zone_taken[page_zoneidx] += nr_pages;
 			list_move(&page->lru, dst);
 			break;
-- 
1.9.1
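
As an aside, a minimal stand-alone C sketch of the pattern behind change 3):
cache the result of a repeated lookup in a local index and reuse it for the
per-zone counters. zone_of(), MAX_ZONES and reclaim_idx are invented
stand-ins for page_zonenum(), MAX_NR_ZONES and sc->reclaim_idx, not kernel
APIs, and this snippet is not part of the patch above.

/*
 * Illustrative userspace sketch of the "look up the zone once" pattern.
 */
#include <stdio.h>

#define MAX_ZONES 4

static int zone_of(int page)
{
	return page % MAX_ZONES;	/* stand-in for page_zonenum(page) */
}

int main(void)
{
	unsigned long nr_taken[MAX_ZONES] = { 0 };
	unsigned long nr_skipped[MAX_ZONES] = { 0 };
	const int reclaim_idx = 2;	/* stand-in for sc->reclaim_idx */
	int page, i;

	for (page = 0; page < 16; page++) {
		int zoneidx = zone_of(page);	/* call once, reuse below */

		if (zoneidx > reclaim_idx) {	/* zone above reclaim target */
			nr_skipped[zoneidx]++;
			continue;
		}
		nr_taken[zoneidx]++;
	}

	for (i = 0; i < MAX_ZONES; i++)
		printf("zone %d: taken=%lu skipped=%lu\n",
		       i, nr_taken[i], nr_skipped[i]);

	return 0;
}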