| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 1 | #include <linux/err.h> | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 2 | #include <linux/slab.h> | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 3 | #include <linux/module.h> | 
 | 4 | #include <linux/spinlock.h> | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 5 | #include <linux/hardirq.h> | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 6 | #include "extent_map.h" | 
 | 7 |  | 
| Chris Mason | 86479a0 | 2007-09-10 19:58:16 -0400 | [diff] [blame] | 8 |  | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 9 | static struct kmem_cache *extent_map_cache; | 
| Chris Mason | ca66462 | 2007-11-27 11:16:35 -0500 | [diff] [blame] | 10 |  | 
| Wyatt Banks | 2f4cbe6 | 2007-11-19 10:22:33 -0500 | [diff] [blame] | 11 | int __init extent_map_init(void) | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 12 | { | 
| Christoph Hellwig | 9601e3f | 2009-04-13 15:33:09 +0200 | [diff] [blame] | 13 | 	extent_map_cache = kmem_cache_create("extent_map", | 
 | 14 | 			sizeof(struct extent_map), 0, | 
 | 15 | 			SLAB_RECLAIM_ACCOUNT | SLAB_MEM_SPREAD, NULL); | 
| Wyatt Banks | 2f4cbe6 | 2007-11-19 10:22:33 -0500 | [diff] [blame] | 16 | 	if (!extent_map_cache) | 
 | 17 | 		return -ENOMEM; | 
| Wyatt Banks | 2f4cbe6 | 2007-11-19 10:22:33 -0500 | [diff] [blame] | 18 | 	return 0; | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 19 | } | 
 | 20 |  | 
| Christian Hesse | 17636e0 | 2007-12-11 09:25:06 -0500 | [diff] [blame] | 21 | void extent_map_exit(void) | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 22 | { | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 23 | 	if (extent_map_cache) | 
 | 24 | 		kmem_cache_destroy(extent_map_cache); | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 25 | } | 
 | 26 |  | 
/**
 * extent_map_tree_init - initialize extent map tree
 * @tree:		tree to initialize
 * @mask:		flags for memory allocations during tree operations
 *
 * Initialize the extent tree @tree.  Should be called for each new inode
 * or other user of the extent_map interface.
 *
 * NOTE(review): @mask is not used by this function body; allocations
 * happen in callers such as alloc_extent_map().  Kept for API
 * compatibility — confirm before removing.
 */
void extent_map_tree_init(struct extent_map_tree *tree, gfp_t mask)
{
	tree->map = RB_ROOT;		/* empty rbtree of extent_maps keyed by start offset */
	rwlock_init(&tree->lock);	/* protects ->map; callers take this read or write */
}
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 40 |  | 
| Christoph Hellwig | 9d2423c | 2008-06-11 21:52:17 -0400 | [diff] [blame] | 41 | /** | 
 | 42 |  * alloc_extent_map - allocate new extent map structure | 
 | 43 |  * @mask:	memory allocation flags | 
 | 44 |  * | 
 | 45 |  * Allocate a new extent_map structure.  The new structure is | 
 | 46 |  * returned with a reference count of one and needs to be | 
 | 47 |  * freed using free_extent_map() | 
 | 48 |  */ | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 49 | struct extent_map *alloc_extent_map(gfp_t mask) | 
 | 50 | { | 
 | 51 | 	struct extent_map *em; | 
 | 52 | 	em = kmem_cache_alloc(extent_map_cache, mask); | 
 | 53 | 	if (!em || IS_ERR(em)) | 
 | 54 | 		return em; | 
 | 55 | 	em->in_tree = 0; | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 56 | 	em->flags = 0; | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 57 | 	atomic_set(&em->refs, 1); | 
 | 58 | 	return em; | 
 | 59 | } | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 60 |  | 
| Christoph Hellwig | 9d2423c | 2008-06-11 21:52:17 -0400 | [diff] [blame] | 61 | /** | 
 | 62 |  * free_extent_map - drop reference count of an extent_map | 
 | 63 |  * @em:		extent map beeing releasead | 
 | 64 |  * | 
 | 65 |  * Drops the reference out on @em by one and free the structure | 
 | 66 |  * if the reference count hits zero. | 
 | 67 |  */ | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 68 | void free_extent_map(struct extent_map *em) | 
 | 69 | { | 
| Chris Mason | 2bf5a72 | 2007-08-30 11:54:02 -0400 | [diff] [blame] | 70 | 	if (!em) | 
 | 71 | 		return; | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 72 | 	WARN_ON(atomic_read(&em->refs) == 0); | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 73 | 	if (atomic_dec_and_test(&em->refs)) { | 
 | 74 | 		WARN_ON(em->in_tree); | 
 | 75 | 		kmem_cache_free(extent_map_cache, em); | 
 | 76 | 	} | 
 | 77 | } | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 78 |  | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 79 | static struct rb_node *tree_insert(struct rb_root *root, u64 offset, | 
 | 80 | 				   struct rb_node *node) | 
 | 81 | { | 
| Chris Mason | d397712 | 2009-01-05 21:25:51 -0500 | [diff] [blame] | 82 | 	struct rb_node **p = &root->rb_node; | 
 | 83 | 	struct rb_node *parent = NULL; | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 84 | 	struct extent_map *entry; | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 85 |  | 
| Chris Mason | d397712 | 2009-01-05 21:25:51 -0500 | [diff] [blame] | 86 | 	while (*p) { | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 87 | 		parent = *p; | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 88 | 		entry = rb_entry(parent, struct extent_map, rb_node); | 
 | 89 |  | 
 | 90 | 		WARN_ON(!entry->in_tree); | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 91 |  | 
 | 92 | 		if (offset < entry->start) | 
 | 93 | 			p = &(*p)->rb_left; | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 94 | 		else if (offset >= extent_map_end(entry)) | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 95 | 			p = &(*p)->rb_right; | 
 | 96 | 		else | 
 | 97 | 			return parent; | 
 | 98 | 	} | 
 | 99 |  | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 100 | 	entry = rb_entry(node, struct extent_map, rb_node); | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 101 | 	entry->in_tree = 1; | 
 | 102 | 	rb_link_node(node, parent, p); | 
 | 103 | 	rb_insert_color(node, root); | 
 | 104 | 	return NULL; | 
 | 105 | } | 
 | 106 |  | 
/*
 * search through the tree for an extent_map with a given offset.  If
 * it can't be found, try to find some neighboring extents
 *
 * Returns the matching rb_node, or NULL with *prev_ret set to the
 * first extent ending after @offset and *next_ret set to the last
 * extent starting at or before @offset (either may be NULL).
 */
static struct rb_node *__tree_search(struct rb_root *root, u64 offset,
				     struct rb_node **prev_ret,
				     struct rb_node **next_ret)
{
	struct rb_node *n = root->rb_node;
	struct rb_node *prev = NULL;
	struct rb_node *orig_prev = NULL;
	struct extent_map *entry;
	struct extent_map *prev_entry = NULL;

	/* standard rbtree descent; remember the last node visited so the
	 * neighbor walks below have a starting point */
	while (n) {
		entry = rb_entry(n, struct extent_map, rb_node);
		prev = n;
		prev_entry = entry;

		WARN_ON(!entry->in_tree);

		if (offset < entry->start)
			n = n->rb_left;
		else if (offset >= extent_map_end(entry))
			n = n->rb_right;
		else
			return n;	/* exact hit: this extent contains offset */
	}

	if (prev_ret) {
		orig_prev = prev;
		/* walk forward until the extent ends past offset; the
		 * "prev &&" check runs before prev_entry is used, so the
		 * rb_entry() computed from a NULL prev is never read */
		while (prev && offset >= extent_map_end(prev_entry)) {
			prev = rb_next(prev);
			prev_entry = rb_entry(prev, struct extent_map, rb_node);
		}
		*prev_ret = prev;
		prev = orig_prev;	/* restore for the next_ret walk below */
	}

	if (next_ret) {
		/* symmetric backward walk to the last extent starting at or
		 * before offset */
		prev_entry = rb_entry(prev, struct extent_map, rb_node);
		while (prev && offset < prev_entry->start) {
			prev = rb_prev(prev);
			prev_entry = rb_entry(prev, struct extent_map, rb_node);
		}
		*next_ret = prev;
	}
	return NULL;
}
 | 156 |  | 
| Chris Mason | d352ac6 | 2008-09-29 15:18:18 -0400 | [diff] [blame] | 157 | /* check to see if two extent_map structs are adjacent and safe to merge */ | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 158 | static int mergable_maps(struct extent_map *prev, struct extent_map *next) | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 159 | { | 
| Chris Mason | 7f3c74f | 2008-07-18 12:01:11 -0400 | [diff] [blame] | 160 | 	if (test_bit(EXTENT_FLAG_PINNED, &prev->flags)) | 
 | 161 | 		return 0; | 
 | 162 |  | 
| Chris Mason | c8b9781 | 2008-10-29 14:49:59 -0400 | [diff] [blame] | 163 | 	/* | 
 | 164 | 	 * don't merge compressed extents, we need to know their | 
 | 165 | 	 * actual size | 
 | 166 | 	 */ | 
 | 167 | 	if (test_bit(EXTENT_FLAG_COMPRESSED, &prev->flags)) | 
 | 168 | 		return 0; | 
 | 169 |  | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 170 | 	if (extent_map_end(prev) == next->start && | 
 | 171 | 	    prev->flags == next->flags && | 
 | 172 | 	    prev->bdev == next->bdev && | 
 | 173 | 	    ((next->block_start == EXTENT_MAP_HOLE && | 
 | 174 | 	      prev->block_start == EXTENT_MAP_HOLE) || | 
 | 175 | 	     (next->block_start == EXTENT_MAP_INLINE && | 
 | 176 | 	      prev->block_start == EXTENT_MAP_INLINE) || | 
 | 177 | 	     (next->block_start == EXTENT_MAP_DELALLOC && | 
 | 178 | 	      prev->block_start == EXTENT_MAP_DELALLOC) || | 
 | 179 | 	     (next->block_start < EXTENT_MAP_LAST_BYTE - 1 && | 
 | 180 | 	      next->block_start == extent_map_block_end(prev)))) { | 
 | 181 | 		return 1; | 
 | 182 | 	} | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 183 | 	return 0; | 
 | 184 | } | 
 | 185 |  | 
/*
 * unpin_extent_cache - clear EXTENT_FLAG_PINNED on the map covering
 * [start, start + len) and try to merge it with its rbtree neighbors.
 *
 * Takes tree->lock for writing.  Always returns 0; the WARN_ON fires
 * if the lookup misses or the found map does not begin exactly at
 * @start.
 */
int unpin_extent_cache(struct extent_map_tree *tree, u64 start, u64 len)
{
	int ret = 0;
	struct extent_map *merge = NULL;
	struct rb_node *rb;
	struct extent_map *em;

	write_lock(&tree->lock);
	em = lookup_extent_mapping(tree, start, len);	/* takes a ref on em */

	WARN_ON(!em || em->start != start);

	if (!em)
		goto out;

	clear_bit(EXTENT_FLAG_PINNED, &em->flags);

	/* try to absorb the previous extent into em */
	if (em->start != 0) {
		rb = rb_prev(&em->rb_node);
		if (rb)
			merge = rb_entry(rb, struct extent_map, rb_node);
		if (rb && mergable_maps(merge, em)) {
			em->start = merge->start;
			em->len += merge->len;
			em->block_len += merge->block_len;
			em->block_start = merge->block_start;
			merge->in_tree = 0;
			rb_erase(&merge->rb_node, &tree->map);
			free_extent_map(merge);
		}
	}

	/* try to absorb the next extent into em */
	rb = rb_next(&em->rb_node);
	if (rb)
		merge = rb_entry(rb, struct extent_map, rb_node);
	if (rb && mergable_maps(em, merge)) {
		em->len += merge->len;
		/* NOTE(review): adds merge->len (not merge->block_len);
		 * appears safe only because mergable_maps() rejects
		 * compressed extents, where len != block_len — confirm */
		em->block_len += merge->len;
		rb_erase(&merge->rb_node, &tree->map);
		merge->in_tree = 0;
		free_extent_map(merge);
	}

	free_extent_map(em);	/* drop the ref taken by the lookup above */
out:
	write_unlock(&tree->lock);
	return ret;

}
 | 235 |  | 
/**
 * add_extent_mapping - add new extent map to the extent tree
 * @tree:	tree to insert new map in
 * @em:		map to insert
 *
 * Insert @em into @tree or perform a simple forward/backward merge with
 * existing mappings.  The extent_map struct passed in will be inserted
 * into the tree directly, with an additional reference taken, or a
 * reference dropped if the merge attempt was successful.
 *
 * Returns 0 on success or -EEXIST if the range is already mapped.
 * Caller is expected to hold tree->lock.
 */
int add_extent_mapping(struct extent_map_tree *tree,
		       struct extent_map *em)
{
	int ret = 0;
	struct extent_map *merge = NULL;
	struct rb_node *rb;
	struct extent_map *exist;

	/* bail early if any existing map intersects [em->start, em->len) */
	exist = lookup_extent_mapping(tree, em->start, em->len);
	if (exist) {
		free_extent_map(exist);	/* drop the ref the lookup took */
		ret = -EEXIST;
		goto out;
	}
	rb = tree_insert(&tree->map, em->start, &em->rb_node);
	if (rb) {
		/* tree_insert returned a conflicting node: not inserted */
		ret = -EEXIST;
		goto out;
	}
	atomic_inc(&em->refs);	/* the tree now holds its own reference */
	/* try to absorb the previous extent into em */
	if (em->start != 0) {
		rb = rb_prev(&em->rb_node);
		if (rb)
			merge = rb_entry(rb, struct extent_map, rb_node);
		if (rb && mergable_maps(merge, em)) {
			em->start = merge->start;
			em->len += merge->len;
			em->block_len += merge->block_len;
			em->block_start = merge->block_start;
			merge->in_tree = 0;
			rb_erase(&merge->rb_node, &tree->map);
			free_extent_map(merge);
		}
	 }
	/* try to absorb the next extent into em */
	rb = rb_next(&em->rb_node);
	if (rb)
		merge = rb_entry(rb, struct extent_map, rb_node);
	if (rb && mergable_maps(em, merge)) {
		em->len += merge->len;
		/* NOTE(review): merge->len, not merge->block_len; safe only
		 * because compressed extents are never mergeable — confirm */
		em->block_len += merge->len;
		rb_erase(&merge->rb_node, &tree->map);
		merge->in_tree = 0;
		free_extent_map(merge);
	}
out:
	return ret;
}
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 293 |  | 
| Chris Mason | d352ac6 | 2008-09-29 15:18:18 -0400 | [diff] [blame] | 294 | /* simple helper to do math around the end of an extent, handling wrap */ | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 295 | static u64 range_end(u64 start, u64 len) | 
 | 296 | { | 
 | 297 | 	if (start + len < start) | 
 | 298 | 		return (u64)-1; | 
 | 299 | 	return start + len; | 
 | 300 | } | 
 | 301 |  | 
| Christoph Hellwig | 9d2423c | 2008-06-11 21:52:17 -0400 | [diff] [blame] | 302 | /** | 
 | 303 |  * lookup_extent_mapping - lookup extent_map | 
 | 304 |  * @tree:	tree to lookup in | 
 | 305 |  * @start:	byte offset to start the search | 
 | 306 |  * @len:	length of the lookup range | 
 | 307 |  * | 
 | 308 |  * Find and return the first extent_map struct in @tree that intersects the | 
 | 309 |  * [start, len] range.  There may be additional objects in the tree that | 
 | 310 |  * intersect, so check the object returned carefully to make sure that no | 
 | 311 |  * additional lookups are needed. | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 312 |  */ | 
 | 313 | struct extent_map *lookup_extent_mapping(struct extent_map_tree *tree, | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 314 | 					 u64 start, u64 len) | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 315 | { | 
 | 316 | 	struct extent_map *em; | 
 | 317 | 	struct rb_node *rb_node; | 
| Christoph Hellwig | 306929f | 2008-06-10 10:21:04 -0400 | [diff] [blame] | 318 | 	struct rb_node *prev = NULL; | 
 | 319 | 	struct rb_node *next = NULL; | 
 | 320 | 	u64 end = range_end(start, len); | 
 | 321 |  | 
| Chris Mason | 5f56406 | 2008-01-22 16:47:59 -0500 | [diff] [blame] | 322 | 	rb_node = __tree_search(&tree->map, start, &prev, &next); | 
 | 323 | 	if (!rb_node && prev) { | 
 | 324 | 		em = rb_entry(prev, struct extent_map, rb_node); | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 325 | 		if (end > em->start && start < extent_map_end(em)) | 
| Chris Mason | 5f56406 | 2008-01-22 16:47:59 -0500 | [diff] [blame] | 326 | 			goto found; | 
 | 327 | 	} | 
 | 328 | 	if (!rb_node && next) { | 
 | 329 | 		em = rb_entry(next, struct extent_map, rb_node); | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 330 | 		if (end > em->start && start < extent_map_end(em)) | 
| Chris Mason | 5f56406 | 2008-01-22 16:47:59 -0500 | [diff] [blame] | 331 | 			goto found; | 
 | 332 | 	} | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 333 | 	if (!rb_node) { | 
 | 334 | 		em = NULL; | 
 | 335 | 		goto out; | 
 | 336 | 	} | 
 | 337 | 	if (IS_ERR(rb_node)) { | 
 | 338 | 		em = ERR_PTR(PTR_ERR(rb_node)); | 
 | 339 | 		goto out; | 
 | 340 | 	} | 
 | 341 | 	em = rb_entry(rb_node, struct extent_map, rb_node); | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 342 | 	if (end > em->start && start < extent_map_end(em)) | 
 | 343 | 		goto found; | 
 | 344 |  | 
 | 345 | 	em = NULL; | 
 | 346 | 	goto out; | 
 | 347 |  | 
| Chris Mason | 5f56406 | 2008-01-22 16:47:59 -0500 | [diff] [blame] | 348 | found: | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 349 | 	atomic_inc(&em->refs); | 
 | 350 | out: | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 351 | 	return em; | 
 | 352 | } | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 353 |  | 
| Christoph Hellwig | 9d2423c | 2008-06-11 21:52:17 -0400 | [diff] [blame] | 354 | /** | 
| Chris Mason | b917b7c | 2009-09-18 16:07:03 -0400 | [diff] [blame] | 355 |  * search_extent_mapping - find a nearby extent map | 
 | 356 |  * @tree:	tree to lookup in | 
 | 357 |  * @start:	byte offset to start the search | 
 | 358 |  * @len:	length of the lookup range | 
 | 359 |  * | 
 | 360 |  * Find and return the first extent_map struct in @tree that intersects the | 
 | 361 |  * [start, len] range. | 
 | 362 |  * | 
 | 363 |  * If one can't be found, any nearby extent may be returned | 
 | 364 |  */ | 
 | 365 | struct extent_map *search_extent_mapping(struct extent_map_tree *tree, | 
 | 366 | 					 u64 start, u64 len) | 
 | 367 | { | 
 | 368 | 	struct extent_map *em; | 
 | 369 | 	struct rb_node *rb_node; | 
 | 370 | 	struct rb_node *prev = NULL; | 
 | 371 | 	struct rb_node *next = NULL; | 
 | 372 |  | 
 | 373 | 	rb_node = __tree_search(&tree->map, start, &prev, &next); | 
 | 374 | 	if (!rb_node && prev) { | 
 | 375 | 		em = rb_entry(prev, struct extent_map, rb_node); | 
 | 376 | 		goto found; | 
 | 377 | 	} | 
 | 378 | 	if (!rb_node && next) { | 
 | 379 | 		em = rb_entry(next, struct extent_map, rb_node); | 
 | 380 | 		goto found; | 
 | 381 | 	} | 
 | 382 | 	if (!rb_node) { | 
 | 383 | 		em = NULL; | 
 | 384 | 		goto out; | 
 | 385 | 	} | 
 | 386 | 	if (IS_ERR(rb_node)) { | 
 | 387 | 		em = ERR_PTR(PTR_ERR(rb_node)); | 
 | 388 | 		goto out; | 
 | 389 | 	} | 
 | 390 | 	em = rb_entry(rb_node, struct extent_map, rb_node); | 
 | 391 | 	goto found; | 
 | 392 |  | 
 | 393 | 	em = NULL; | 
 | 394 | 	goto out; | 
 | 395 |  | 
 | 396 | found: | 
 | 397 | 	atomic_inc(&em->refs); | 
 | 398 | out: | 
 | 399 | 	return em; | 
 | 400 | } | 
 | 401 |  | 
 | 402 | /** | 
| Christoph Hellwig | 9d2423c | 2008-06-11 21:52:17 -0400 | [diff] [blame] | 403 |  * remove_extent_mapping - removes an extent_map from the extent tree | 
 | 404 |  * @tree:	extent tree to remove from | 
 | 405 |  * @em:		extent map beeing removed | 
 | 406 |  * | 
 | 407 |  * Removes @em from @tree.  No reference counts are dropped, and no checks | 
 | 408 |  * are done to see if the range is in use | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 409 |  */ | 
 | 410 | int remove_extent_mapping(struct extent_map_tree *tree, struct extent_map *em) | 
 | 411 | { | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 412 | 	int ret = 0; | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 413 |  | 
| Chris Mason | 7f3c74f | 2008-07-18 12:01:11 -0400 | [diff] [blame] | 414 | 	WARN_ON(test_bit(EXTENT_FLAG_PINNED, &em->flags)); | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 415 | 	rb_erase(&em->rb_node, &tree->map); | 
 | 416 | 	em->in_tree = 0; | 
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 417 | 	return ret; | 
 | 418 | } |