diff --git a/mm/vmalloc.c b/mm/vmalloc.c
index f8f61ff3235b..aac34c2a410b 100644
--- a/mm/vmalloc.c
+++ b/mm/vmalloc.c
@@ -325,6 +325,8 @@ EXPORT_SYMBOL(vmalloc_to_pfn);
 
 /*** Global kva allocator ***/
 
+#define DEBUG_AUGMENT_PROPAGATE_CHECK	0
+
 #define VM_LAZY_FREE	0x02
 #define VM_VM_AREA	0x04
 
@@ -539,6 +541,48 @@ unlink_va(struct vmap_area *va, struct rb_root *root)
 	}
 }
 
+#if DEBUG_AUGMENT_PROPAGATE_CHECK
+static void
+augment_tree_propagate_check(struct rb_node *n)
+{
+	struct vmap_area *va;
+	struct rb_node *node;
+	unsigned long size;
+	bool found = false;
+
+	if (n == NULL)
+		return;
+
+	va = rb_entry(n, struct vmap_area, rb_node);
+	size = va->subtree_max_size;
+	node = n;
+
+	while (node) {
+		va = rb_entry(node, struct vmap_area, rb_node);
+
+		if (get_subtree_max_size(node->rb_left) == size) {
+			node = node->rb_left;
+		} else {
+			if (va_size(va) == size) {
+				found = true;
+				break;
+			}
+
+			node = node->rb_right;
+		}
+	}
+
+	if (!found) {
+		va = rb_entry(n, struct vmap_area, rb_node);
+		pr_emerg("tree is corrupted: %lu, %lu\n",
+			va_size(va), va->subtree_max_size);
+	}
+
+	augment_tree_propagate_check(n->rb_left);
+	augment_tree_propagate_check(n->rb_right);
+}
+#endif
+
 /*
  * This function populates subtree_max_size from bottom to upper
  * levels starting from VA point. The propagation must be done
@@ -588,6 +632,10 @@ augment_tree_propagate_from(struct vmap_area *va)
 		va->subtree_max_size = new_va_sub_max_size;
 		node = rb_parent(&va->rb_node);
 	}
+
+#if DEBUG_AUGMENT_PROPAGATE_CHECK
+	augment_tree_propagate_check(free_vmap_area_root.rb_node);
+#endif
 }
 
 static void
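
For reference, here is a minimal user-space sketch of the invariant that augment_tree_propagate_check() verifies: every node's subtree_max_size must equal the largest va_size() found anywhere in that node's subtree. The toy_node structure and the toy_* helpers below are illustrative assumptions, not code from mm/vmalloc.c, and the sketch recomputes the maximum recursively rather than walking a single descending path the way the kernel check does.

/*
 * User-space model of the augmented-tree invariant checked above.
 * All names prefixed with toy_ are hypothetical, for illustration only.
 */
#include <stdio.h>
#include <stdbool.h>

struct toy_node {
	unsigned long va_start, va_end;	/* va_size() == va_end - va_start */
	unsigned long subtree_max_size;	/* augmented value under test */
	struct toy_node *left, *right;
};

static unsigned long toy_va_size(const struct toy_node *n)
{
	return n->va_end - n->va_start;
}

/* Returns the true maximum va_size() in the subtree and flags mismatches. */
static unsigned long toy_check(const struct toy_node *n, bool *ok)
{
	unsigned long max, l, r;

	if (!n)
		return 0;

	max = toy_va_size(n);
	l = toy_check(n->left, ok);
	r = toy_check(n->right, ok);
	if (l > max)
		max = l;
	if (r > max)
		max = r;

	if (n->subtree_max_size != max) {
		printf("tree is corrupted: %lu, %lu\n",
		       toy_va_size(n), n->subtree_max_size);
		*ok = false;
	}

	return max;
}

int main(void)
{
	/* leaf covering a 16-byte area, root covering a 64-byte area */
	struct toy_node leaf = { 0, 16, 16, NULL, NULL };
	struct toy_node root = { 100, 164, 64, &leaf, NULL };
	bool ok = true;

	toy_check(&root, &ok);
	printf("tree %s\n", ok ? "consistent" : "corrupted");
	return 0;
}

The in-kernel check avoids recomputing the maximum: starting from a node it only descends toward the child whose subtree_max_size matches the expected value, and because it is then called recursively on both children, every node reachable from free_vmap_area_root is still covered.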