Lines matching refs:order
127 void defer_compaction(struct zone *zone, int order) in defer_compaction() argument
132 if (order < zone->compact_order_failed) in defer_compaction()
133 zone->compact_order_failed = order; in defer_compaction()
138 trace_mm_compaction_defer_compaction(zone, order); in defer_compaction()
142 bool compaction_deferred(struct zone *zone, int order) in compaction_deferred() argument
146 if (order < zone->compact_order_failed) in compaction_deferred()
156 trace_mm_compaction_deferred(zone, order); in compaction_deferred()
166 void compaction_defer_reset(struct zone *zone, int order, in compaction_defer_reset() argument
173 if (order >= zone->compact_order_failed) in compaction_defer_reset()
174 zone->compact_order_failed = order + 1; in compaction_defer_reset()
176 trace_mm_compaction_defer_reset(zone, order); in compaction_defer_reset()
180 bool compaction_restarting(struct zone *zone, int order) in compaction_restarting() argument
182 if (order < zone->compact_order_failed) in compaction_restarting()
1185 static inline bool is_via_compact_memory(int order) in is_via_compact_memory() argument
1187 return order == -1; in is_via_compact_memory()
1193 unsigned int order; in __compact_finished() local
1216 if (is_via_compact_memory(cc->order)) in __compact_finished()
1222 if (!zone_watermark_ok(zone, cc->order, watermark, cc->classzone_idx, in __compact_finished()
1227 for (order = cc->order; order < MAX_ORDER; order++) { in __compact_finished()
1228 struct free_area *area = &zone->free_area[order]; in __compact_finished()
1245 if (find_suitable_fallback(area, order, migratetype, in __compact_finished()
1259 trace_mm_compaction_finished(zone, cc->order, ret); in compact_finished()
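
__compact_finished() (lines 1193-1245) decides when compact_zone() may stop: an explicit compact_memory run (order == -1) never finishes early, a zone still under its watermark keeps going, and otherwise the scan from cc->order up to MAX_ORDER looks for a free_area that can serve the request's migratetype, either directly or through find_suitable_fallback(). A rough standalone sketch of that order scan (free_area_model and its fields are invented for illustration, and any free block is treated as an acceptable fallback to keep the sketch short):

#include <stdbool.h>

#define MAX_ORDER_MODEL 11
#define MIGRATE_TYPES_MODEL 3

/* Invented stand-in for the kernel's per-order free lists. */
struct free_area_model {
    unsigned long nr_free[MIGRATE_TYPES_MODEL]; /* free blocks per migratetype */
};

/* Sketch of the loop at line 1227: compaction has met its goal once some
 * order >= the requested one holds a block of the wanted migratetype, or
 * (crudely simplified here) any block that could serve as a fallback. */
static bool compaction_goal_met(const struct free_area_model area[MAX_ORDER_MODEL],
                                int order, int migratetype)
{
    for (int o = order; o < MAX_ORDER_MODEL; o++) {
        if (area[o].nr_free[migratetype])
            return true;                 /* direct hit */
        for (int mt = 0; mt < MIGRATE_TYPES_MODEL; mt++)
            if (area[o].nr_free[mt])
                return true;             /* stand-in for find_suitable_fallback() */
    }
    return false;
}

int main(void)
{
    struct free_area_model areas[MAX_ORDER_MODEL] = { 0 };
    areas[9].nr_free[0] = 1;                          /* one free order-9 block */
    return compaction_goal_met(areas, 4, 0) ? 0 : 1;  /* goal met for order 4 */
}
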
1273 static unsigned long __compaction_suitable(struct zone *zone, int order, in __compaction_suitable() argument
1279 if (is_via_compact_memory(order)) in __compaction_suitable()
1287 if (zone_watermark_ok(zone, order, watermark, classzone_idx, in __compaction_suitable()
1296 watermark += (2UL << order); in __compaction_suitable()
1311 fragindex = fragmentation_index(zone, order); in __compaction_suitable()
1318 unsigned long compaction_suitable(struct zone *zone, int order, in compaction_suitable() argument
1323 ret = __compaction_suitable(zone, order, alloc_flags, classzone_idx); in compaction_suitable()
1324 trace_mm_compaction_suitable(zone, order, ret); in compaction_suitable()
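
__compaction_suitable() (lines 1273-1311) gates the whole run: an explicit order of -1 always proceeds, a zone that already passes the order-sized watermark check needs no compaction, and otherwise the low watermark is bumped by 2 << order before rechecking, since compaction only rearranges pages and needs spare ones both as migration targets and for the allocation itself; the fragmentation index then gets the final say. A toy model of that bump, with made-up numbers:

#include <stdbool.h>
#include <stdio.h>

/* Sketch of the feasibility bound at line 1296: the zone must hold the low
 * watermark plus roughly 2 << order extra free pages, or compaction is
 * skipped outright. The values in main() are illustrative, not kernel data. */
static bool enough_free_to_compact(unsigned long free_pages,
                                   unsigned long low_wmark, int order)
{
    return free_pages >= low_wmark + (2UL << order);
}

int main(void)
{
    printf("%d\n", enough_free_to_compact(600,  512, 9)); /* 0: too little free memory, skip */
    printf("%d\n", enough_free_to_compact(2000, 512, 9)); /* 1: worth trying */
    return 0;
}
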
1339 ret = compaction_suitable(zone, cc->order, cc->alloc_flags, in compact_zone()
1356 if (compaction_restarting(zone, cc->order) && !current_is_kswapd()) in compact_zone()
1432 if (cc->order > 0 && cc->last_migrated_pfn) { in compact_zone()
1435 cc->migrate_pfn & ~((1UL << cc->order) - 1); in compact_zone()
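
The two lines from compact_zone()'s main loop (1432-1435) find the last order-aligned block boundary behind the migrate scanner; if the scanner has crossed such a boundary since the last page was migrated, the per-CPU page lists are worth draining so recently freed pages can merge back into the buddy lists. The masking expression is plain power-of-two alignment:

#include <stdio.h>

/* Round a page frame number down to the start of its order-aligned block,
 * as done with cc->migrate_pfn & ~((1UL << cc->order) - 1) at line 1435. */
static unsigned long block_start_pfn(unsigned long pfn, int order)
{
    return pfn & ~((1UL << order) - 1);
}

int main(void)
{
    /* For order 9 (2MB blocks with 4KB pages), pfn 0x12345 falls in the
     * block that starts at pfn 0x12200. */
    printf("0x%lx\n", block_start_pfn(0x12345UL, 9));
    return 0;
}
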
1478 static unsigned long compact_zone_order(struct zone *zone, int order, in compact_zone_order() argument
1486 .order = order, in compact_zone_order()
1519 unsigned long try_to_compact_pages(gfp_t gfp_mask, unsigned int order, in try_to_compact_pages() argument
1533 if (!order || !may_enter_fs || !may_perform_io) in try_to_compact_pages()
1536 trace_mm_compaction_try_to_compact_pages(order, gfp_mask, mode); in try_to_compact_pages()
1544 if (compaction_deferred(zone, order)) in try_to_compact_pages()
1547 status = compact_zone_order(zone, order, gfp_mask, mode, in try_to_compact_pages()
1558 if (zone_watermark_ok(zone, order, low_wmark_pages(zone), in try_to_compact_pages()
1566 compaction_defer_reset(zone, order, false); in try_to_compact_pages()
1587 defer_compaction(zone, order); in try_to_compact_pages()
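
try_to_compact_pages() (lines 1519-1587) is the direct-compaction entry point: order-0 requests, and callers that may not enter the filesystem or perform IO, return immediately; each zone in the zonelist is then tried in turn, skipping zones whose compaction is currently deferred, resetting the deferral state of a zone that now passes its low watermark, and deferring a zone where a full pass still failed. A compressed, hypothetical rendering of that control flow (zone_model and its fields are inventions for this sketch, not kernel structures, and the deferral is applied unconditionally here where the kernel only defers after a full synchronous pass):

#include <stdbool.h>
#include <stddef.h>

/* Hypothetical per-zone state for this sketch only; the real code keeps its
 * deferral counters in struct zone and uses the helpers at lines 127-182. */
struct zone_model {
    bool deferred;          /* would compaction_deferred() return true?  */
    bool compact_succeeds;  /* pretend outcome of compact_zone_order()   */
    bool watermark_ok;      /* pretend outcome of zone_watermark_ok()    */
};

/* Shape of the zone loop in try_to_compact_pages(), lines 1544-1587. */
static bool try_to_compact_zones(struct zone_model *zones, size_t nr, int order)
{
    if (!order)
        return false;                       /* order-0 never needs compaction */

    for (size_t i = 0; i < nr; i++) {
        struct zone_model *z = &zones[i];

        if (z->deferred)
            continue;                       /* skip recently failed zones */

        /* compact_zone_order(zone, order, ...) would run here. */
        if (z->compact_succeeds && z->watermark_ok) {
            z->deferred = false;            /* compaction_defer_reset() */
            return true;                    /* the allocation should now succeed */
        }

        z->deferred = true;                 /* defer_compaction(), simplified */
    }
    return false;
}

int main(void)
{
    struct zone_model zones[2] = {
        { .deferred = true  },                               /* skipped */
        { .compact_succeeds = true, .watermark_ok = true },  /* succeeds */
    };
    return try_to_compact_zones(zones, 2, 9) ? 0 : 1;
}
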
1646 if (is_via_compact_memory(cc->order)) in __compact_pgdat()
1649 if (is_via_compact_memory(cc->order) || in __compact_pgdat()
1650 !compaction_deferred(zone, cc->order)) in __compact_pgdat()
1653 if (cc->order > 0) { in __compact_pgdat()
1654 if (zone_watermark_ok(zone, cc->order, in __compact_pgdat()
1656 compaction_defer_reset(zone, cc->order, false); in __compact_pgdat()
1664 void compact_pgdat(pg_data_t *pgdat, int order) in compact_pgdat() argument
1667 .order = order, in compact_pgdat()
1671 if (!order) in compact_pgdat()
1680 .order = -1, in compact_node()
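
compact_node() (line 1680) builds its compact_control with .order = -1, which is exactly the sentinel that is_via_compact_memory() at line 1187 tests for: such an explicitly requested run (for example via the vm.compact_memory sysctl) compacts every zone fully instead of stopping once a watermark is met. The check itself is trivial:

#include <stdbool.h>
#include <stdio.h>

/* Mirror of is_via_compact_memory() at line 1187: an order of -1 marks a
 * request that came from an explicit trigger rather than from an allocation. */
static bool is_via_compact_memory(int order)
{
    return order == -1;
}

int main(void)
{
    printf("%d\n", is_via_compact_memory(-1)); /* 1: compact everything */
    printf("%d\n", is_via_compact_memory(9));  /* 0: normal high-order request */
    return 0;
}
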