#include <linux/err.h>
#include <linux/slab.h>
#include <linux/module.h>
#include <linux/spinlock.h>
#include <linux/hardirq.h>
#include "ctree.h"
#include "extent_map.h"

static struct kmem_cache *extent_map_cache;

int __init extent_map_init(void)
{
	extent_map_cache = kmem_cache_create("extent_map",
			sizeof(struct extent_map), 0,
			SLAB_RECLAIM_ACCOUNT | SLAB_MEM_SPREAD, NULL);
	if (!extent_map_cache)
		return -ENOMEM;
	return 0;
}

void extent_map_exit(void)
{
	if (extent_map_cache)
		kmem_cache_destroy(extent_map_cache);
}

/**
 * extent_map_tree_init - initialize extent map tree
 * @tree:	tree to initialize
 *
 * Initialize the extent tree @tree.  Should be called for each new inode
 * or other user of the extent_map interface.
 */
void extent_map_tree_init(struct extent_map_tree *tree)
{
	tree->map = RB_ROOT;
	rwlock_init(&tree->lock);
}

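/*
 * Example (illustrative sketch, not part of the original file): a user of
 * this interface typically embeds an extent_map_tree in some per-inode
 * structure and initializes it once before any lookups or insertions.
 * The structure and function names below are hypothetical.
 */
struct example_inode_info {
	struct extent_map_tree extent_tree;
};

static void __maybe_unused example_inode_init(struct example_inode_info *info)
{
	/* sets up an empty rbtree and the rwlock protecting it */
	extent_map_tree_init(&info->extent_tree);
}
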
/**
 * alloc_extent_map - allocate new extent map structure
 *
 * Allocate a new extent_map structure.  The new structure is
 * returned with a reference count of one and needs to be
 * freed using free_extent_map().
 */
struct extent_map *alloc_extent_map(void)
{
	struct extent_map *em;
	em = kmem_cache_alloc(extent_map_cache, GFP_NOFS);
	if (!em)
		return NULL;
	em->in_tree = 0;
	em->flags = 0;
	em->compress_type = BTRFS_COMPRESS_NONE;
	atomic_set(&em->refs, 1);
	return em;
}

/**
 * free_extent_map - drop reference count of an extent_map
 * @em:		extent map being released
 *
 * Drops the reference count on @em by one and frees the structure
 * if the reference count hits zero.
 */
void free_extent_map(struct extent_map *em)
{
	if (!em)
		return;
	WARN_ON(atomic_read(&em->refs) == 0);
	if (atomic_dec_and_test(&em->refs)) {
		WARN_ON(em->in_tree);
		kmem_cache_free(extent_map_cache, em);
	}
}

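/*
 * Example (illustrative sketch, not part of the original file): typical
 * lifecycle of an extent_map that never makes it into a tree.  The field
 * values and the function name are hypothetical; a real caller fills the
 * mapping in from its on-disk file extent item.
 */
static __maybe_unused struct extent_map *
example_build_mapping(struct block_device *bdev)
{
	struct extent_map *em;

	em = alloc_extent_map();
	if (!em)
		return NULL;
	em->start = 0;			/* first byte of the file range */
	em->len = 4096;			/* length of the range in bytes */
	em->block_start = 1024 * 1024;	/* byte offset of the data on disk */
	em->block_len = 4096;
	em->bdev = bdev;
	/* the caller owns the single reference; free_extent_map() drops it */
	return em;
}
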
/*
 * insert @node into @root, keyed by @offset.  Returns NULL on success, or
 * the existing node whose range already covers @offset.
 */
static struct rb_node *tree_insert(struct rb_root *root, u64 offset,
				   struct rb_node *node)
{
	struct rb_node **p = &root->rb_node;
	struct rb_node *parent = NULL;
	struct extent_map *entry;

	while (*p) {
		parent = *p;
		entry = rb_entry(parent, struct extent_map, rb_node);

		WARN_ON(!entry->in_tree);

		if (offset < entry->start)
			p = &(*p)->rb_left;
		else if (offset >= extent_map_end(entry))
			p = &(*p)->rb_right;
		else
			return parent;
	}

	entry = rb_entry(node, struct extent_map, rb_node);
	entry->in_tree = 1;
	rb_link_node(node, parent, p);
	rb_insert_color(node, root);
	return NULL;
}

/*
 * search through the tree for an extent_map with a given offset.  If
 * it can't be found, try to find some neighboring extents and return
 * them via @prev_ret and @next_ret.
 */
static struct rb_node *__tree_search(struct rb_root *root, u64 offset,
				     struct rb_node **prev_ret,
				     struct rb_node **next_ret)
{
	struct rb_node *n = root->rb_node;
	struct rb_node *prev = NULL;
	struct rb_node *orig_prev = NULL;
	struct extent_map *entry;
	struct extent_map *prev_entry = NULL;

	while (n) {
		entry = rb_entry(n, struct extent_map, rb_node);
		prev = n;
		prev_entry = entry;

		WARN_ON(!entry->in_tree);

		if (offset < entry->start)
			n = n->rb_left;
		else if (offset >= extent_map_end(entry))
			n = n->rb_right;
		else
			return n;
	}

	if (prev_ret) {
		orig_prev = prev;
		while (prev && offset >= extent_map_end(prev_entry)) {
			prev = rb_next(prev);
			prev_entry = rb_entry(prev, struct extent_map, rb_node);
		}
		*prev_ret = prev;
		prev = orig_prev;
	}

	if (next_ret) {
		prev_entry = rb_entry(prev, struct extent_map, rb_node);
		while (prev && offset < prev_entry->start) {
			prev = rb_prev(prev);
			prev_entry = rb_entry(prev, struct extent_map, rb_node);
		}
		*next_ret = prev;
	}
	return NULL;
}

/* check to see if two extent_map structs are adjacent and safe to merge */
static int mergable_maps(struct extent_map *prev, struct extent_map *next)
{
	if (test_bit(EXTENT_FLAG_PINNED, &prev->flags))
		return 0;

	/*
	 * don't merge compressed extents, we need to know their
	 * actual size
	 */
	if (test_bit(EXTENT_FLAG_COMPRESSED, &prev->flags))
		return 0;

	if (extent_map_end(prev) == next->start &&
	    prev->flags == next->flags &&
	    prev->bdev == next->bdev &&
	    ((next->block_start == EXTENT_MAP_HOLE &&
	      prev->block_start == EXTENT_MAP_HOLE) ||
	     (next->block_start == EXTENT_MAP_INLINE &&
	      prev->block_start == EXTENT_MAP_INLINE) ||
	     (next->block_start == EXTENT_MAP_DELALLOC &&
	      prev->block_start == EXTENT_MAP_DELALLOC) ||
	     (next->block_start < EXTENT_MAP_LAST_BYTE - 1 &&
	      next->block_start == extent_map_block_end(prev)))) {
		return 1;
	}
	return 0;
}

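/*
 * Example (illustrative sketch, not part of the original file): two
 * mappings that are contiguous both in the file (prev ends where next
 * starts) and on disk (next->block_start == extent_map_block_end(prev)),
 * with identical flags and bdev, satisfy the test above.  The values and
 * the function name are hypothetical.
 */
static int __maybe_unused example_mergable(struct block_device *bdev)
{
	struct extent_map prev = { .start = 0, .len = 4096,
				   .block_start = 1024 * 1024,
				   .block_len = 4096, .bdev = bdev };
	struct extent_map next = { .start = 4096, .len = 4096,
				   .block_start = 1024 * 1024 + 4096,
				   .block_len = 4096, .bdev = bdev };

	return mergable_maps(&prev, &next);	/* returns 1 */
}
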
/*
 * try to merge @em with the mappings immediately before and after it in
 * the tree.  Mappings that get merged into @em are removed from the tree
 * and have their reference dropped.
 */
static void try_merge_map(struct extent_map_tree *tree, struct extent_map *em)
{
	struct extent_map *merge = NULL;
	struct rb_node *rb;

	if (em->start != 0) {
		rb = rb_prev(&em->rb_node);
		if (rb)
			merge = rb_entry(rb, struct extent_map, rb_node);
		if (rb && mergable_maps(merge, em)) {
			em->start = merge->start;
			em->len += merge->len;
			em->block_len += merge->block_len;
			em->block_start = merge->block_start;
			merge->in_tree = 0;
			rb_erase(&merge->rb_node, &tree->map);
			free_extent_map(merge);
		}
	}

	rb = rb_next(&em->rb_node);
	if (rb)
		merge = rb_entry(rb, struct extent_map, rb_node);
	if (rb && mergable_maps(em, merge)) {
		em->len += merge->len;
		em->block_len += merge->block_len;
		rb_erase(&merge->rb_node, &tree->map);
		merge->in_tree = 0;
		free_extent_map(merge);
	}
}

/**
 * unpin_extent_cache - unpin an extent mapping
 * @tree:	tree holding the mapping
 * @start:	logical start of the pinned range
 * @len:	length of the pinned range
 *
 * Clear EXTENT_FLAG_PINNED on the mapping that starts at @start and try to
 * merge it with its neighbours.  The tree lock is taken internally.
 */
int unpin_extent_cache(struct extent_map_tree *tree, u64 start, u64 len)
{
	int ret = 0;
	struct extent_map *em;

	write_lock(&tree->lock);
	em = lookup_extent_mapping(tree, start, len);

	WARN_ON(!em || em->start != start);

	if (!em)
		goto out;

	clear_bit(EXTENT_FLAG_PINNED, &em->flags);

	try_merge_map(tree, em);

	free_extent_map(em);
out:
	write_unlock(&tree->lock);
	return ret;
}

/**
 * add_extent_mapping - add new extent map to the extent tree
 * @tree:	tree to insert new map in
 * @em:		map to insert
 *
 * Insert @em into @tree, then try to merge it with neighbouring mappings.
 * On success an additional reference is taken on @em; neighbours that get
 * merged into it are removed from the tree and have their references
 * dropped.  Returns -EEXIST if the range is already mapped.
 */
int add_extent_mapping(struct extent_map_tree *tree,
		       struct extent_map *em)
{
	int ret = 0;
	struct rb_node *rb;
	struct extent_map *exist;

	exist = lookup_extent_mapping(tree, em->start, em->len);
	if (exist) {
		free_extent_map(exist);
		ret = -EEXIST;
		goto out;
	}
	rb = tree_insert(&tree->map, em->start, &em->rb_node);
	if (rb) {
		ret = -EEXIST;
		goto out;
	}
	atomic_inc(&em->refs);

	try_merge_map(tree, em);
out:
	return ret;
}
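
/*
 * Example (illustrative sketch, not part of the original file): callers
 * are expected to hold tree->lock for writing around the insertion, and
 * an -EEXIST return means some mapping already covers the range.  The
 * function name is hypothetical.
 */
static int __maybe_unused example_insert(struct extent_map_tree *tree,
					 struct extent_map *em)
{
	int ret;

	write_lock(&tree->lock);
	ret = add_extent_mapping(tree, em);
	write_unlock(&tree->lock);

	if (!ret)
		free_extent_map(em);	/* the tree holds its own reference */
	/* on -EEXIST the caller still owns @em and must free it itself */
	return ret;
}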
| Chris Mason | a52d9a8 | 2007-08-27 16:49:44 -0400 | [diff] [blame] | 276 |  | 
| Chris Mason | d352ac6 | 2008-09-29 15:18:18 -0400 | [diff] [blame] | 277 | /* simple helper to do math around the end of an extent, handling wrap */ | 
| Chris Mason | d1310b2 | 2008-01-24 16:13:08 -0500 | [diff] [blame] | 278 | static u64 range_end(u64 start, u64 len) | 
 | 279 | { | 
 | 280 | 	if (start + len < start) | 
 | 281 | 		return (u64)-1; | 
 | 282 | 	return start + len; | 
 | 283 | } | 
 | 284 |  | 
| Li Zefan | ed64f06 | 2011-07-14 03:18:15 +0000 | [diff] [blame] | 285 | struct extent_map *__lookup_extent_mapping(struct extent_map_tree *tree, | 
 | 286 | 					   u64 start, u64 len, int strict) | 
 | 287 | { | 
 | 288 | 	struct extent_map *em; | 
 | 289 | 	struct rb_node *rb_node; | 
 | 290 | 	struct rb_node *prev = NULL; | 
 | 291 | 	struct rb_node *next = NULL; | 
 | 292 | 	u64 end = range_end(start, len); | 
 | 293 |  | 
 | 294 | 	rb_node = __tree_search(&tree->map, start, &prev, &next); | 
 | 295 | 	if (!rb_node) { | 
 | 296 | 		if (prev) | 
 | 297 | 			rb_node = prev; | 
 | 298 | 		else if (next) | 
 | 299 | 			rb_node = next; | 
 | 300 | 		else | 
 | 301 | 			return NULL; | 
 | 302 | 	} | 
 | 303 |  | 
 | 304 | 	em = rb_entry(rb_node, struct extent_map, rb_node); | 
 | 305 |  | 
 | 306 | 	if (strict && !(end > em->start && start < extent_map_end(em))) | 
 | 307 | 		return NULL; | 
 | 308 |  | 
 | 309 | 	atomic_inc(&em->refs); | 
 | 310 | 	return em; | 
 | 311 | } | 
 | 312 |  | 
/**
 * lookup_extent_mapping - lookup extent_map
 * @tree:	tree to lookup in
 * @start:	byte offset to start the search
 * @len:	length of the lookup range
 *
 * Find and return the first extent_map struct in @tree that intersects the
 * [start, start + len) range.  There may be additional objects in the tree
 * that intersect, so check the object returned carefully to make sure that
 * no additional lookups are needed.
 */
struct extent_map *lookup_extent_mapping(struct extent_map_tree *tree,
					 u64 start, u64 len)
{
	return __lookup_extent_mapping(tree, start, len, 1);
}

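/*
 * Example (illustrative sketch, not part of the original file): lookups
 * are done under the read side of tree->lock, and the returned mapping
 * carries a reference that the caller must drop.  The function name is
 * hypothetical.
 */
static int __maybe_unused example_range_is_mapped(struct extent_map_tree *tree,
						  u64 start, u64 len)
{
	struct extent_map *em;

	read_lock(&tree->lock);
	em = lookup_extent_mapping(tree, start, len);
	read_unlock(&tree->lock);

	if (!em)
		return 0;

	/* the lookup took a reference on the mapping; drop it when done */
	free_extent_map(em);
	return 1;
}
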
/**
 * search_extent_mapping - find a nearby extent map
 * @tree:	tree to lookup in
 * @start:	byte offset to start the search
 * @len:	length of the lookup range
 *
 * Find and return the first extent_map struct in @tree that intersects the
 * [start, start + len) range.
 *
 * If one can't be found, any nearby extent may be returned.
 */
struct extent_map *search_extent_mapping(struct extent_map_tree *tree,
					 u64 start, u64 len)
{
	return __lookup_extent_mapping(tree, start, len, 0);
}

/**
 * remove_extent_mapping - removes an extent_map from the extent tree
 * @tree:	extent tree to remove from
 * @em:		extent map being removed
 *
 * Removes @em from @tree.  No reference counts are dropped, and no checks
 * are done to see if the range is in use.
 */
int remove_extent_mapping(struct extent_map_tree *tree, struct extent_map *em)
{
	int ret = 0;

	WARN_ON(test_bit(EXTENT_FLAG_PINNED, &em->flags));
	rb_erase(&em->rb_node, &tree->map);
	em->in_tree = 0;
	return ret;
}
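
/*
 * Example (illustrative sketch, not part of the original file): removal is
 * done under the write side of tree->lock, and because remove_extent_mapping()
 * does not drop any references, the caller has to drop both its lookup
 * reference and the reference the tree was holding.  The function name is
 * hypothetical.
 */
static int __maybe_unused example_drop_mapping(struct extent_map_tree *tree,
					       u64 start, u64 len)
{
	struct extent_map *em;

	write_lock(&tree->lock);
	em = lookup_extent_mapping(tree, start, len);
	if (em)
		remove_extent_mapping(tree, em);
	write_unlock(&tree->lock);

	if (!em)
		return -ENOENT;

	free_extent_map(em);	/* drop the reference taken by the lookup */
	free_extent_map(em);	/* drop the reference the tree was holding */
	return 0;
}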