/*
 * mm/interval_tree.c - interval tree for mapping->i_mmap
 *
 * Copyright (C) 2012, Michel Lespinasse <walken@google.com>
 *
 * This file is released under the GPL v2.
 */

#include <linux/mm.h>
#include <linux/fs.h>
#include <linux/rmap.h>
#include <linux/interval_tree_generic.h>

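/*
 * A VMA is indexed in mapping->i_mmap by the closed range of file page
 * offsets it maps: [vma_start_pgoff(vma), vma_last_pgoff(vma)].  The two
 * helpers below compute those bounds from vm_pgoff and the VMA's size.
 */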
static inline unsigned long vma_start_pgoff(struct vm_area_struct *v)
{
	return v->vm_pgoff;
}

static inline unsigned long vma_last_pgoff(struct vm_area_struct *v)
{
	return v->vm_pgoff + ((v->vm_end - v->vm_start) >> PAGE_SHIFT) - 1;
}

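/*
 * INTERVAL_TREE_DEFINE() (from include/linux/interval_tree_generic.h)
 * expands into vma_interval_tree_insert(), vma_interval_tree_remove(),
 * vma_interval_tree_iter_first() and vma_interval_tree_iter_next(), plus
 * the vma_interval_tree_augment callbacks used further down.  The empty
 * argument before the prefix leaves the generated functions non-static.
 *
 * A typical caller walks every VMA mapping file pages in [first, last]
 * through the vma_interval_tree_foreach() wrapper from include/linux/mm.h,
 * roughly (sketch; handle_vma() is a placeholder for the per-VMA work):
 *
 *	struct vm_area_struct *vma;
 *
 *	vma_interval_tree_foreach(vma, &mapping->i_mmap, first, last)
 *		handle_vma(vma);
 */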
INTERVAL_TREE_DEFINE(struct vm_area_struct, shared.linear.rb,
		     unsigned long, shared.linear.rb_subtree_last,
		     vma_start_pgoff, vma_last_pgoff,, vma_interval_tree)

/*
 * Insert node immediately after prev in the interval tree.
 *
 * Both VMAs are known to start at the same pgoff (see the VM_BUG_ON), so
 * the usual top-down search from the root can be skipped: node is linked
 * either as prev's right child or at the leftmost free slot in prev's
 * right subtree, and rb_subtree_last is bumped along that path so the
 * augmented data still covers node's interval.
 */
void vma_interval_tree_insert_after(struct vm_area_struct *node,
				    struct vm_area_struct *prev,
				    struct rb_root *root)
{
	struct rb_node **link;
	struct vm_area_struct *parent;
	unsigned long last = vma_last_pgoff(node);

	VM_BUG_ON(vma_start_pgoff(node) != vma_start_pgoff(prev));

	if (!prev->shared.linear.rb.rb_right) {
		parent = prev;
		link = &prev->shared.linear.rb.rb_right;
	} else {
		parent = rb_entry(prev->shared.linear.rb.rb_right,
				  struct vm_area_struct, shared.linear.rb);
		if (parent->shared.linear.rb_subtree_last < last)
			parent->shared.linear.rb_subtree_last = last;
		while (parent->shared.linear.rb.rb_left) {
			parent = rb_entry(parent->shared.linear.rb.rb_left,
				struct vm_area_struct, shared.linear.rb);
			if (parent->shared.linear.rb_subtree_last < last)
				parent->shared.linear.rb_subtree_last = last;
		}
		link = &parent->shared.linear.rb.rb_left;
	}

	node->shared.linear.rb_subtree_last = last;
	rb_link_node(&node->shared.linear.rb, &parent->shared.linear.rb, link);
	rb_insert_augmented(&node->shared.linear.rb, root,
			    &vma_interval_tree_augment);
}

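/*
 * The remainder of this file instantiates the analogous interval tree for
 * anonymous rmap: each anon_vma keeps an rb_root of anon_vma_chain nodes,
 * keyed by the pgoff range of the VMA that each chain links to.
 */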
static inline unsigned long avc_start_pgoff(struct anon_vma_chain *avc)
{
	return vma_start_pgoff(avc->vma);
}

static inline unsigned long avc_last_pgoff(struct anon_vma_chain *avc)
{
	return vma_last_pgoff(avc->vma);
}

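/*
 * Passing "static inline" and the "__anon_vma_interval_tree" prefix keeps
 * the generated functions local to this file; the exported wrappers below
 * add the CONFIG_DEBUG_VM_RB bookkeeping on top of them.
 */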
INTERVAL_TREE_DEFINE(struct anon_vma_chain, rb, unsigned long, rb_subtree_last,
		     avc_start_pgoff, avc_last_pgoff,
		     static inline, __anon_vma_interval_tree)

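/*
 * With CONFIG_DEBUG_VM_RB, the insert wrapper caches the interval bounds
 * in the anon_vma_chain so that anon_vma_interval_tree_verify() can later
 * catch a VMA whose start or size changed while it was still linked in an
 * anon_vma interval tree.
 */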
void anon_vma_interval_tree_insert(struct anon_vma_chain *node,
				   struct rb_root *root)
{
#ifdef CONFIG_DEBUG_VM_RB
	node->cached_vma_start = avc_start_pgoff(node);
	node->cached_vma_last = avc_last_pgoff(node);
#endif
	__anon_vma_interval_tree_insert(node, root);
}

void anon_vma_interval_tree_remove(struct anon_vma_chain *node,
				   struct rb_root *root)
{
	__anon_vma_interval_tree_remove(node, root);
}

struct anon_vma_chain *
anon_vma_interval_tree_iter_first(struct rb_root *root,
				  unsigned long first, unsigned long last)
{
	return __anon_vma_interval_tree_iter_first(root, first, last);
}

struct anon_vma_chain *
anon_vma_interval_tree_iter_next(struct anon_vma_chain *node,
				 unsigned long first, unsigned long last)
{
	return __anon_vma_interval_tree_iter_next(node, first, last);
}

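/*
 * Callers usually iterate through the anon_vma_interval_tree_foreach()
 * helper in include/linux/rmap.h, which wraps the iter_first/iter_next
 * pair above, roughly (sketch; handle_avc() is a placeholder):
 *
 *	struct anon_vma_chain *avc;
 *
 *	anon_vma_interval_tree_foreach(avc, &anon_vma->rb_root, first, last)
 *		handle_avc(avc);
 */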
#ifdef CONFIG_DEBUG_VM_RB
void anon_vma_interval_tree_verify(struct anon_vma_chain *node)
{
	WARN_ON_ONCE(node->cached_vma_start != avc_start_pgoff(node));
	WARN_ON_ONCE(node->cached_vma_last != avc_last_pgoff(node));
}
#endif