1 // Copyright 2013 Google Inc. All Rights Reserved.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 //
15 // A btree implementation of the STL set and map interfaces. A btree is both
16 // smaller and faster than STL set/map. The red-black tree implementation of
17 // STL set/map has an overhead of 3 pointers (left, right and parent) plus the
18 // node color information for each stored value. So a set<int32> consumes 20
19 // bytes per value stored, assuming 32-bit pointers. This btree implementation stores multiple
20 // values on fixed size nodes (usually 256 bytes) and doesn't store child
21 // pointers for leaf nodes. The result is that a btree_set<int32> may use much
22 // less memory per stored value. For the random insertion benchmark in
23 // btree_test.cc, a btree_set<int32> with node-size of 256 uses 4.9 bytes per
24 // stored value.
25 //
26 // The packing of multiple values on to each node of a btree has another effect
27 // besides better space utilization: better cache locality due to fewer cache
28 // lines being accessed. Better cache locality translates into faster
29 // operations.
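//
// For illustration only (a sketch; the btree_set and btree_map wrappers are
// declared in btree_set.h and btree_map.h, not in this header), the
// containers are used like their STL counterparts:
//
//   btree::btree_set<int32_t> s;  // default target node size of 256 bytes
//   s.insert(17);
//   bool present = (s.count(17) > 0);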
30 //
31 // CAVEATS
32 //
33 // Insertions and deletions on a btree can cause splitting, merging or
34 // rebalancing of btree nodes. And even without these operations, insertions
35 // and deletions on a btree will move values around within a node. In both
36 // cases, the result is that insertions and deletions can invalidate iterators
37 // pointing to values other than the one being inserted/deleted. This is
38 // notably different from STL set/map which takes care to not invalidate
39 // iterators on insert/erase except, of course, for iterators pointing to the
40 // value being erased. A partial workaround when erasing is available:
41 // erase() returns an iterator pointing to the item just after the one that was
42 // erased (or end() if none exists). See also safe_btree.
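//
// For example (an illustrative sketch, assuming a btree_set<int> named "s"
// and a placeholder predicate should_erase()), the returned iterator lets a
// loop erase elements without relying on any other, possibly invalidated,
// iterator:
//
//   for (btree_set<int>::iterator it = s.begin(); it != s.end(); ) {
//     if (should_erase(*it)) {
//       it = s.erase(it);  // re-seat the iterator at the next element
//     } else {
//       ++it;
//     }
//   }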
43
44 // PERFORMANCE
45 //
46 // btree_bench --benchmarks=. 2>&1 | ./benchmarks.awk
47 //
48 // Run on pmattis-warp.nyc (4 X 2200 MHz CPUs); 2010/03/04-15:23:06
49 // Benchmark STL(ns) B-Tree(ns) @ <size>
50 // --------------------------------------------------------
51 // BM_set_int32_insert 1516 608 +59.89% <256> [40.0, 5.2]
52 // BM_set_int32_lookup 1160 414 +64.31% <256> [40.0, 5.2]
53 // BM_set_int32_fulllookup 960 410 +57.29% <256> [40.0, 4.4]
54 // BM_set_int32_delete 1741 528 +69.67% <256> [40.0, 5.2]
55 // BM_set_int32_queueaddrem 3078 1046 +66.02% <256> [40.0, 5.5]
56 // BM_set_int32_mixedaddrem 3600 1384 +61.56% <256> [40.0, 5.3]
57 // BM_set_int32_fifo 227 113 +50.22% <256> [40.0, 4.4]
58 // BM_set_int32_fwditer 158 26 +83.54% <256> [40.0, 5.2]
59 // BM_map_int32_insert 1551 636 +58.99% <256> [48.0, 10.5]
60 // BM_map_int32_lookup 1200 508 +57.67% <256> [48.0, 10.5]
61 // BM_map_int32_fulllookup 989 487 +50.76% <256> [48.0, 8.8]
62 // BM_map_int32_delete 1794 628 +64.99% <256> [48.0, 10.5]
63 // BM_map_int32_queueaddrem 3189 1266 +60.30% <256> [48.0, 11.6]
64 // BM_map_int32_mixedaddrem 3822 1623 +57.54% <256> [48.0, 10.9]
65 // BM_map_int32_fifo 151 134 +11.26% <256> [48.0, 8.8]
66 // BM_map_int32_fwditer 161 32 +80.12% <256> [48.0, 10.5]
67 // BM_set_int64_insert 1546 636 +58.86% <256> [40.0, 10.5]
68 // BM_set_int64_lookup 1200 512 +57.33% <256> [40.0, 10.5]
69 // BM_set_int64_fulllookup 971 487 +49.85% <256> [40.0, 8.8]
70 // BM_set_int64_delete 1745 616 +64.70% <256> [40.0, 10.5]
71 // BM_set_int64_queueaddrem 3163 1195 +62.22% <256> [40.0, 11.6]
72 // BM_set_int64_mixedaddrem 3760 1564 +58.40% <256> [40.0, 10.9]
73 // BM_set_int64_fifo 146 103 +29.45% <256> [40.0, 8.8]
74 // BM_set_int64_fwditer 162 31 +80.86% <256> [40.0, 10.5]
75 // BM_map_int64_insert 1551 720 +53.58% <256> [48.0, 20.7]
76 // BM_map_int64_lookup 1214 612 +49.59% <256> [48.0, 20.7]
77 // BM_map_int64_fulllookup 994 592 +40.44% <256> [48.0, 17.2]
78 // BM_map_int64_delete 1778 764 +57.03% <256> [48.0, 20.7]
79 // BM_map_int64_queueaddrem 3189 1547 +51.49% <256> [48.0, 20.9]
80 // BM_map_int64_mixedaddrem 3779 1887 +50.07% <256> [48.0, 21.6]
81 // BM_map_int64_fifo 147 145 +1.36% <256> [48.0, 17.2]
82 // BM_map_int64_fwditer 162 41 +74.69% <256> [48.0, 20.7]
83 // BM_set_string_insert 1989 1966 +1.16% <256> [64.0, 44.5]
84 // BM_set_string_lookup 1709 1600 +6.38% <256> [64.0, 44.5]
85 // BM_set_string_fulllookup 1573 1529 +2.80% <256> [64.0, 35.4]
86 // BM_set_string_delete 2520 1920 +23.81% <256> [64.0, 44.5]
87 // BM_set_string_queueaddrem 4706 4309 +8.44% <256> [64.0, 48.3]
88 // BM_set_string_mixedaddrem 5080 4654 +8.39% <256> [64.0, 46.7]
89 // BM_set_string_fifo 318 512 -61.01% <256> [64.0, 35.4]
90 // BM_set_string_fwditer 182 93 +48.90% <256> [64.0, 44.5]
91 // BM_map_string_insert 2600 2227 +14.35% <256> [72.0, 55.8]
92 // BM_map_string_lookup 2068 1730 +16.34% <256> [72.0, 55.8]
93 // BM_map_string_fulllookup 1859 1618 +12.96% <256> [72.0, 44.0]
94 // BM_map_string_delete 3168 2080 +34.34% <256> [72.0, 55.8]
95 // BM_map_string_queueaddrem 5840 4701 +19.50% <256> [72.0, 59.4]
96 // BM_map_string_mixedaddrem 6400 5200 +18.75% <256> [72.0, 57.8]
97 // BM_map_string_fifo 398 596 -49.75% <256> [72.0, 44.0]
98 // BM_map_string_fwditer 243 113 +53.50% <256> [72.0, 55.8]
99
100 #ifndef UTIL_BTREE_BTREE_H__
101 #define UTIL_BTREE_BTREE_H__
102
103 #include <stddef.h>
104 #include <string.h>
105 #include <sys/types.h>
106 #include <algorithm>
107 #include <functional>
108 #include <iostream>
109 #include <iterator>
110 #include <limits>
111 #include <type_traits>
112 #include <new>
113 #include <ostream>
114 #include <string>
115 #include <utility>
116
117 #include "include/ceph_assert.h"
118
119 namespace btree {
120
121 // Inside a btree method, if we just call swap(), it will choose the
122 // btree::swap method, which we don't want. And we can't say ::swap
123 // because then MSVC won't pick up any std::swap() implementations. We
124 // can't just use std::swap() directly because then we don't get the
125 // specialization for types outside the std namespace. So the solution
126 // is to have a special swap helper function whose name doesn't
127 // collide with other swap functions defined by the btree classes.
128 template <typename T>
129 inline void btree_swap_helper(T &a, T &b) {
130 using std::swap;
131 swap(a, b);
132 }
133
134 // A template helper used to select A or B based on a condition.
135 template<bool cond, typename A, typename B>
136 struct if_ {
137 typedef A type;
138 };
139
140 template<typename A, typename B>
141 struct if_<false, A, B> {
142 typedef B type;
143 };
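// For example (illustrative only):
//   if_<true, int, char>::type   is int
//   if_<false, int, char>::type  is char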
144
145 // Types small_ and big_ are a promise that sizeof(small_) < sizeof(big_).
146 typedef char small_;
147
148 struct big_ {
149 char dummy[2];
150 };
151
152 // A compile-time assertion.
153 template <bool>
154 struct CompileAssert {
155 };
156
157 #define COMPILE_ASSERT(expr, msg) \
158 typedef CompileAssert<(bool(expr))> msg[bool(expr) ? 1 : -1]
159
160 // A helper type used to indicate that a key-compare-to functor has been
161 // provided. A user can specify a key-compare-to functor by doing:
162 //
163 // struct MyStringComparer
164 //     : public btree::btree_key_compare_to_tag {
165 // int operator()(const string &a, const string &b) const {
166 // return a.compare(b);
167 // }
168 // };
169 //
170 // Note that the return type is an int and not a bool. There is a
171 // COMPILE_ASSERT which enforces this return type.
172 struct btree_key_compare_to_tag {
173 };
174
175 // A helper class that indicates if the Compare parameter is derived from
176 // btree_key_compare_to_tag.
177 template <typename Compare>
178 struct btree_is_key_compare_to
179 : public std::is_convertible<Compare, btree_key_compare_to_tag> {
180 };
181
182 // A helper class to convert a boolean comparison into a three-way
183 // "compare-to" comparison that returns a negative value to indicate
184 // less-than, zero to indicate equality and a positive value to
185 // indicate greater-than. This helper class is specialized for
186 // less<string> and greater<string>. The btree_key_compare_to_adapter
187 // class is provided so that btree users automatically get the more
188 // efficient compare-to code when using std::string keys
189 // with the common comparison functors.
190 template <typename Compare>
191 struct btree_key_compare_to_adapter : Compare {
192 btree_key_compare_to_adapter() { }
193 btree_key_compare_to_adapter(const Compare &c) : Compare(c) { }
194 btree_key_compare_to_adapter(const btree_key_compare_to_adapter<Compare> &c)
195 : Compare(c) {
196 }
197 };
198
199 template <>
200 struct btree_key_compare_to_adapter<std::less<std::string> >
201 : public btree_key_compare_to_tag {
202 btree_key_compare_to_adapter() {}
203 btree_key_compare_to_adapter(const std::less<std::string>&) {}
204 btree_key_compare_to_adapter(
205 const btree_key_compare_to_adapter<std::less<std::string> >&) {}
206 int operator()(const std::string &a, const std::string &b) const {
207 return a.compare(b);
208 }
209 };
210
211 template <>
212 struct btree_key_compare_to_adapter<std::greater<std::string> >
213 : public btree_key_compare_to_tag {
214 btree_key_compare_to_adapter() {}
215 btree_key_compare_to_adapter(const std::greater<std::string>&) {}
216 btree_key_compare_to_adapter(
217 const btree_key_compare_to_adapter<std::greater<std::string> >&) {}
218 int operator()(const std::string &a, const std::string &b) const {
219 return b.compare(a);
220 }
221 };
222
223 // A helper class that allows a compare-to functor to behave like a plain
224 // compare functor. This primary template is used when we do not have a
225 // compare-to functor.
226 template <typename Key, typename Compare, bool HaveCompareTo>
227 struct btree_key_comparer {
228 btree_key_comparer() {}
229 btree_key_comparer(Compare c) : comp(c) {}
230 static bool bool_compare(const Compare &comp, const Key &x, const Key &y) {
231 return comp(x, y);
232 }
233 bool operator()(const Key &x, const Key &y) const {
234 return bool_compare(comp, x, y);
235 }
236 Compare comp;
237 };
238
239 // A specialization of btree_key_comparer when a compare-to functor is
240 // present. We need a plain (boolean) comparison in some parts of the btree
241 // code, such as insert-with-hint.
242 template <typename Key, typename Compare>
243 struct btree_key_comparer<Key, Compare, true> {
244 btree_key_comparer() {}
245 btree_key_comparer(Compare c) : comp(c) {}
246 static bool bool_compare(const Compare &comp, const Key &x, const Key &y) {
247 return comp(x, y) < 0;
248 }
249 bool operator()(const Key &x, const Key &y) const {
250 return bool_compare(comp, x, y);
251 }
252 Compare comp;
253 };
254
255 // A helper function to compare two keys using the specified compare
256 // functor. This dispatches to the appropriate btree_key_comparer comparison,
257 // depending on whether we have a compare-to functor or not (which depends on
258 // whether Compare is derived from btree_key_compare_to_tag).
259 template <typename Key, typename Compare>
260 static bool btree_compare_keys(
261 const Compare &comp, const Key &x, const Key &y) {
262 typedef btree_key_comparer<Key, Compare,
263 btree_is_key_compare_to<Compare>::value> key_comparer;
264 return key_comparer::bool_compare(comp, x, y);
265 }
266
267 template <typename Key, typename Compare,
268 typename Alloc, int TargetNodeSize, int ValueSize>
269 struct btree_common_params {
270 // If Compare is derived from btree_key_compare_to_tag then use it as the
271 // key_compare type. Otherwise, use btree_key_compare_to_adapter<> which will
272 // fall back to Compare if we don't have an appropriate specialization.
273 typedef typename if_<
274 btree_is_key_compare_to<Compare>::value,
275 Compare, btree_key_compare_to_adapter<Compare> >::type key_compare;
276 // A type which indicates if we have a key-compare-to functor or a plain old
277 // key-compare functor.
278 typedef btree_is_key_compare_to<key_compare> is_key_compare_to;
279
280 typedef Alloc allocator_type;
281 typedef Key key_type;
282 typedef ssize_t size_type;
283 typedef ptrdiff_t difference_type;
284
285 enum {
286 kTargetNodeSize = TargetNodeSize,
287
288 // Available space for values. This is largest for leaf nodes,
289 // which have an overhead of no fewer than two pointers.
290 kNodeValueSpace = TargetNodeSize - 2 * sizeof(void*),
291 };
292
293 // This is an integral type large enough to hold as many
294 // ValueSize-values as will fit in a node of TargetNodeSize bytes.
295 typedef typename if_<
296 (kNodeValueSpace / ValueSize) >= 256,
297 uint16_t,
298 uint8_t>::type node_count_type;
299 };
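// Worked example (illustrative, assuming a 64-bit build where sizeof(void*)
// is 8): for TargetNodeSize = 256 and ValueSize = 4, e.g. a set of int32,
//
//   kNodeValueSpace = 256 - 2 * 8 = 240 bytes
//   240 / 4 = 60 values per node, which is < 256,
//
// so node_count_type is uint8_t. Only when a node could hold 256 or more
// values does the count type widen to uint16_t.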
300
301 // A parameters structure for holding the type parameters for a btree_map.
302 template <typename Key, typename Data, typename Compare,
303 typename Alloc, int TargetNodeSize>
304 struct btree_map_params
305 : public btree_common_params<Key, Compare, Alloc, TargetNodeSize,
306 sizeof(Key) + sizeof(Data)> {
307 typedef Data data_type;
308 typedef Data mapped_type;
309 typedef std::pair<const Key, data_type> value_type;
310 typedef std::pair<Key, data_type> mutable_value_type;
311 typedef value_type* pointer;
312 typedef const value_type* const_pointer;
313 typedef value_type& reference;
314 typedef const value_type& const_reference;
315
316 enum {
317 kValueSize = sizeof(Key) + sizeof(data_type),
318 };
319
320 static const Key& key(const value_type &x) { return x.first; }
321 static const Key& key(const mutable_value_type &x) { return x.first; }
322 static void swap(mutable_value_type *a, mutable_value_type *b) {
323 btree_swap_helper(a->first, b->first);
324 btree_swap_helper(a->second, b->second);
325 }
326 };
327
328 // A parameters structure for holding the type parameters for a btree_set.
329 template <typename Key, typename Compare, typename Alloc, int TargetNodeSize>
330 struct btree_set_params
331 : public btree_common_params<Key, Compare, Alloc, TargetNodeSize,
332 sizeof(Key)> {
333 typedef std::false_type data_type;
334 typedef std::false_type mapped_type;
335 typedef Key value_type;
336 typedef value_type mutable_value_type;
337 typedef value_type* pointer;
338 typedef const value_type* const_pointer;
339 typedef value_type& reference;
340 typedef const value_type& const_reference;
341
342 enum {
343 kValueSize = sizeof(Key),
344 };
345
346 static const Key& key(const value_type &x) { return x; }
347 static void swap(mutable_value_type *a, mutable_value_type *b) {
348 btree_swap_helper<mutable_value_type>(*a, *b);
349 }
350 };
351
352 // An adapter class that converts a lower-bound compare into an upper-bound
353 // compare.
354 template <typename Key, typename Compare>
355 struct btree_upper_bound_adapter : public Compare {
356 btree_upper_bound_adapter(Compare c) : Compare(c) {}
357 bool operator()(const Key &a, const Key &b) const {
358 return !static_cast<const Compare&>(*this)(b, a);
359 }
360 };
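// For example (illustrative only), with Compare = std::less<int> the adapter
// evaluates !(b < a), i.e. a <= b for stored key a and search key b, so a
// lower-bound style scan using the adapted functor stops at the first key
// strictly greater than the search key, which is exactly upper_bound:
//
//   btree_upper_bound_adapter<int, std::less<int> > ub((std::less<int>()));
//   ub(3, 3);  // true:  !(3 < 3), the scan moves past keys equal to 3
//   ub(4, 3);  // false: the scan stops at the first key greater than 3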
361
362 template <typename Key, typename CompareTo>
363 struct btree_upper_bound_compare_to_adapter : public CompareTo {
364 btree_upper_bound_compare_to_adapter(CompareTo c) : CompareTo(c) {}
365 int operator()(const Key &a, const Key &b) const {
366 return static_cast<const CompareTo&>(*this)(b, a);
367 }
368 };
369
370 // Dispatch helper class for using linear search with plain compare.
371 template <typename K, typename N, typename Compare>
372 struct btree_linear_search_plain_compare {
373 static int lower_bound(const K &k, const N &n, Compare comp) {
374 return n.linear_search_plain_compare(k, 0, n.count(), comp);
375 }
376 static int upper_bound(const K &k, const N &n, Compare comp) {
377 typedef btree_upper_bound_adapter<K, Compare> upper_compare;
378 return n.linear_search_plain_compare(k, 0, n.count(), upper_compare(comp));
379 }
380 };
381
382 // Dispatch helper class for using linear search with compare-to
383 template <typename K, typename N, typename CompareTo>
384 struct btree_linear_search_compare_to {
385 static int lower_bound(const K &k, const N &n, CompareTo comp) {
386 return n.linear_search_compare_to(k, 0, n.count(), comp);
387 }
388 static int upper_bound(const K &k, const N &n, CompareTo comp) {
389 typedef btree_upper_bound_adapter<K,
390 btree_key_comparer<K, CompareTo, true> > upper_compare;
391 return n.linear_search_plain_compare(k, 0, n.count(), upper_compare(comp));
392 }
393 };
394
395 // Dispatch helper class for using binary search with plain compare.
396 template <typename K, typename N, typename Compare>
397 struct btree_binary_search_plain_compare {
398 static int lower_bound(const K &k, const N &n, Compare comp) {
399 return n.binary_search_plain_compare(k, 0, n.count(), comp);
400 }
401 static int upper_bound(const K &k, const N &n, Compare comp) {
402 typedef btree_upper_bound_adapter<K, Compare> upper_compare;
403 return n.binary_search_plain_compare(k, 0, n.count(), upper_compare(comp));
404 }
405 };
406
407 // Dispatch helper class for using binary search with compare-to.
408 template <typename K, typename N, typename CompareTo>
409 struct btree_binary_search_compare_to {
410 static int lower_bound(const K &k, const N &n, CompareTo comp) {
411 return n.binary_search_compare_to(k, 0, n.count(), comp);
412 }
413 static int upper_bound(const K &k, const N &n, CompareTo comp) {
414 typedef btree_upper_bound_adapter<K,
415 btree_key_comparer<K, CompareTo, true> > upper_compare;
416 return n.binary_search_plain_compare(k, 0, n.count(), upper_compare(comp));
417 }
418 };
419
420 // A node in the btree holding values. The same node type is used for both internal
421 // and leaf nodes in the btree, though the nodes are allocated in such a way
422 // that the children array is only valid in internal nodes.
423 template <typename Params>
424 class btree_node {
425 public:
426 typedef Params params_type;
427 typedef btree_node<Params> self_type;
428 typedef typename Params::key_type key_type;
429 typedef typename Params::data_type data_type;
430 typedef typename Params::value_type value_type;
431 typedef typename Params::mutable_value_type mutable_value_type;
432 typedef typename Params::pointer pointer;
433 typedef typename Params::const_pointer const_pointer;
434 typedef typename Params::reference reference;
435 typedef typename Params::const_reference const_reference;
436 typedef typename Params::key_compare key_compare;
437 typedef typename Params::size_type size_type;
438 typedef typename Params::difference_type difference_type;
439 // Typedefs for the various types of node searches.
440 typedef btree_linear_search_plain_compare<
441 key_type, self_type, key_compare> linear_search_plain_compare_type;
442 typedef btree_linear_search_compare_to<
443 key_type, self_type, key_compare> linear_search_compare_to_type;
444 typedef btree_binary_search_plain_compare<
445 key_type, self_type, key_compare> binary_search_plain_compare_type;
446 typedef btree_binary_search_compare_to<
447 key_type, self_type, key_compare> binary_search_compare_to_type;
448 // If we have a valid key-compare-to type, use linear_search_compare_to,
449 // otherwise use linear_search_plain_compare.
450 typedef typename if_<
451 Params::is_key_compare_to::value,
452 linear_search_compare_to_type,
453 linear_search_plain_compare_type>::type linear_search_type;
454 // If we have a valid key-compare-to type, use binary_search_compare_to,
455 // otherwise use binary_search_plain_compare.
456 typedef typename if_<
457 Params::is_key_compare_to::value,
458 binary_search_compare_to_type,
459 binary_search_plain_compare_type>::type binary_search_type;
460 // If the key is an integral or floating point type, use linear search which
461 // is faster than binary search for such types. Might be wise to also
462 // configure linear search based on node-size.
463 typedef typename if_<
464 std::is_integral<key_type>::value ||
465 std::is_floating_point<key_type>::value,
466 linear_search_type, binary_search_type>::type search_type;
467
468 struct base_fields {
469 typedef typename Params::node_count_type field_type;
470
471 // A boolean indicating whether the node is a leaf or not.
472 bool leaf;
473 // The position of the node in the node's parent.
474 field_type position;
475 // The maximum number of values the node can hold.
476 field_type max_count;
477 // The count of the number of values in the node.
478 field_type count;
479 // A pointer to the node's parent.
480 btree_node *parent;
481 };
482
483 enum {
484 kValueSize = params_type::kValueSize,
485 kTargetNodeSize = params_type::kTargetNodeSize,
486
487 // Compute how many values we can fit onto a leaf node.
488 kNodeTargetValues = (kTargetNodeSize - sizeof(base_fields)) / kValueSize,
489 // We need a minimum of 3 values per internal node in order to perform
490 // splitting (1 value for the two nodes involved in the split and 1 value
491 // propagated to the parent as the delimiter for the split).
492 kNodeValues = kNodeTargetValues >= 3 ? kNodeTargetValues : 3,
493
494 kExactMatch = 1 << 30,
495 kMatchMask = kExactMatch - 1,
496 };
497
498 struct leaf_fields : public base_fields {
499 // The array of values. Only the first count of these values have been
500 // constructed and are valid.
501 mutable_value_type values[kNodeValues];
502 };
503
504 struct internal_fields : public leaf_fields {
505 // The array of child pointers. The keys in children[i] are all less than
506 // key(i). The keys in children[i + 1] are all greater than key(i). There
507 // are always count + 1 children.
508 btree_node *children[kNodeValues + 1];
509 };
510
511 struct root_fields : public internal_fields {
512 btree_node *rightmost;
513 size_type size;
514 };
515
516 public:
517 // Getter/setter for whether this is a leaf node or not. This value doesn't
518 // change after the node is created.
519 bool leaf() const { return fields_.leaf; }
520
521 // Getter for the position of this node in its parent.
522 int position() const { return fields_.position; }
523 void set_position(int v) { fields_.position = v; }
524
525 // Getter/setter for the number of values stored in this node.
526 int count() const { return fields_.count; }
527 void set_count(int v) { fields_.count = v; }
528 int max_count() const { return fields_.max_count; }
529
530 // Getter for the parent of this node.
531 btree_node* parent() const { return fields_.parent; }
532 // Getter for whether the node is the root of the tree. The parent of the
533 // root of the tree is the leftmost node in the tree which is guaranteed to
534 // be a leaf.
535 bool is_root() const { return parent()->leaf(); }
536 void make_root() {
537 ceph_assert(parent()->is_root());
538 fields_.parent = fields_.parent->parent();
539 }
540
541 // Getter for the rightmost root node field. Only valid on the root node.
542 btree_node* rightmost() const { return fields_.rightmost; }
543 btree_node** mutable_rightmost() { return &fields_.rightmost; }
544
545 // Getter for the size root node field. Only valid on the root node.
546 size_type size() const { return fields_.size; }
547 size_type* mutable_size() { return &fields_.size; }
548
549 // Getters for the key/value at position i in the node.
550 const key_type& key(int i) const {
551 return params_type::key(fields_.values[i]);
552 }
553 reference value(int i) {
554 return reinterpret_cast<reference>(fields_.values[i]);
555 }
556 const_reference value(int i) const {
557 return reinterpret_cast<const_reference>(fields_.values[i]);
558 }
559 mutable_value_type* mutable_value(int i) {
560 return &fields_.values[i];
561 }
562
563 // Swap value i in this node with value j in node x.
564 void value_swap(int i, btree_node *x, int j) {
565 params_type::swap(mutable_value(i), x->mutable_value(j));
566 }
567
568 // Getters/setter for the child at position i in the node.
569 btree_node* child(int i) const { return fields_.children[i]; }
570 btree_node** mutable_child(int i) { return &fields_.children[i]; }
571 void set_child(int i, btree_node *c) {
572 *mutable_child(i) = c;
573 c->fields_.parent = this;
574 c->fields_.position = i;
575 }
576
577 // Returns the position of the first value whose key is not less than k.
578 template <typename Compare>
579 int lower_bound(const key_type &k, const Compare &comp) const {
580 return search_type::lower_bound(k, *this, comp);
581 }
582 // Returns the position of the first value whose key is greater than k.
583 template <typename Compare>
584 int upper_bound(const key_type &k, const Compare &comp) const {
585 return search_type::upper_bound(k, *this, comp);
586 }
587
588 // Returns the position of the first value whose key is not less than k using
589 // linear search performed using plain compare.
590 template <typename Compare>
591 int linear_search_plain_compare(
592 const key_type &k, int s, int e, const Compare &comp) const {
593 while (s < e) {
594 if (!btree_compare_keys(comp, key(s), k)) {
595 break;
596 }
597 ++s;
598 }
599 return s;
600 }
601
602 // Returns the position of the first value whose key is not less than k using
603 // linear search performed using compare-to.
604 template <typename Compare>
605 int linear_search_compare_to(
606 const key_type &k, int s, int e, const Compare &comp) const {
607 while (s < e) {
608 int c = comp(key(s), k);
609 if (c == 0) {
610 return s | kExactMatch;
611 } else if (c > 0) {
612 break;
613 }
614 ++s;
615 }
616 return s;
617 }
618
619 // Returns the position of the first value whose key is not less than k using
620 // binary search performed using plain compare.
621 template <typename Compare>
622 int binary_search_plain_compare(
623 const key_type &k, int s, int e, const Compare &comp) const {
624 while (s != e) {
625 int mid = (s + e) / 2;
626 if (btree_compare_keys(comp, key(mid), k)) {
627 s = mid + 1;
628 } else {
629 e = mid;
630 }
631 }
632 return s;
633 }
634
635 // Returns the position of the first value whose key is not less than k using
636 // binary search performed using compare-to.
637 template <typename CompareTo>
638 int binary_search_compare_to(
639 const key_type &k, int s, int e, const CompareTo &comp) const {
640 while (s != e) {
641 int mid = (s + e) / 2;
642 int c = comp(key(mid), k);
643 if (c < 0) {
644 s = mid + 1;
645 } else if (c > 0) {
646 e = mid;
647 } else {
648 // Need to return the first value whose key is not less than k, which
649 // requires continuing the binary search. Note that we are guaranteed
650 // that the result is an exact match because if "key(mid-1) < k" the
651 // call to binary_search_compare_to() will return "mid".
652 s = binary_search_compare_to(k, s, mid, comp);
653 return s | kExactMatch;
654 }
655 }
656 return s;
657 }
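// How callers decode the result of the compare-to searches above (an
// illustrative sketch; internal_locate_compare_to() in the btree class below
// performs the equivalent decoding):
//
//   int res = node->binary_search_compare_to(k, 0, node->count(), comp);
//   if (res & kExactMatch) {
//     int pos = res & kMatchMask;  // k was found at position pos
//   } else {
//     int pos = res;  // first position in the node whose key is greater than k
//   }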
658
659 // Inserts the value x at position i, shifting all existing values and
660 // children at positions >= i to the right by 1.
661 void insert_value(int i, const value_type &x);
662
663 // Removes the value at position i, shifting all existing values and children
664 // at positions > i to the left by 1.
665 void remove_value(int i);
666
667 // Rebalances a node with its right sibling.
668 void rebalance_right_to_left(btree_node *sibling, int to_move);
669 void rebalance_left_to_right(btree_node *sibling, int to_move);
670
671 // Splits a node, moving a portion of the node's values to its right sibling.
672 void split(btree_node *sibling, int insert_position);
673
674 // Merges a node with its right sibling, moving all of the values and the
675 // delimiting key in the parent node onto itself.
676 void merge(btree_node *sibling);
677
678 // Swap the contents of "this" and "src".
679 void swap(btree_node *src);
680
681 #ifdef NDEBUG
682 static constexpr auto no_debug = true;
683 #else
684 static constexpr auto no_debug = false;
685 #endif
686 // Node allocation/deletion routines.
687 static btree_node* init_leaf(
688 leaf_fields *f, btree_node *parent, int max_count) {
689 btree_node *n = reinterpret_cast<btree_node*>(f);
690 f->leaf = 1;
691 f->position = 0;
692 f->max_count = max_count;
693 f->count = 0;
694 f->parent = parent;
695 if (!no_debug) {
696 memset(&f->values, 0, max_count * sizeof(value_type));
697 }
698 return n;
699 }
700 static btree_node* init_internal(internal_fields *f, btree_node *parent) {
701 btree_node *n = init_leaf(f, parent, kNodeValues);
702 f->leaf = 0;
703 if (!no_debug) {
704 memset(f->children, 0, sizeof(f->children));
705 }
706 return n;
707 }
708 static btree_node* init_root(root_fields *f, btree_node *parent) {
709 btree_node *n = init_internal(f, parent);
710 f->rightmost = parent;
711 f->size = parent->count();
712 return n;
713 }
714 void destroy() {
715 for (int i = 0; i < count(); ++i) {
716 value_destroy(i);
717 }
718 }
719
720 private:
721 void value_init(int i) {
722 new (&fields_.values[i]) mutable_value_type;
723 }
724 void value_init(int i, const value_type &x) {
725 new (&fields_.values[i]) mutable_value_type(x);
726 }
727 void value_destroy(int i) {
728 fields_.values[i].~mutable_value_type();
729 }
730
731 private:
732 root_fields fields_;
733
734 private:
735 btree_node(const btree_node&);
736 void operator=(const btree_node&);
737 };
738
739 template <typename Node, typename Reference, typename Pointer>
740 struct btree_iterator {
741 typedef typename Node::key_type key_type;
742 typedef typename Node::size_type size_type;
743 typedef typename Node::difference_type difference_type;
744 typedef typename Node::params_type params_type;
745
746 typedef Node node_type;
747 typedef typename std::remove_const<Node>::type normal_node;
748 typedef const Node const_node;
749 typedef typename params_type::value_type value_type;
750 typedef typename params_type::pointer normal_pointer;
751 typedef typename params_type::reference normal_reference;
752 typedef typename params_type::const_pointer const_pointer;
753 typedef typename params_type::const_reference const_reference;
754
755 typedef Pointer pointer;
756 typedef Reference reference;
757 typedef std::bidirectional_iterator_tag iterator_category;
758
759 typedef btree_iterator<
760 normal_node, normal_reference, normal_pointer> iterator;
761 typedef btree_iterator<
762 const_node, const_reference, const_pointer> const_iterator;
763 typedef btree_iterator<Node, Reference, Pointer> self_type;
764
765 btree_iterator()
766 : node(NULL),
767 position(-1) {
768 }
769 btree_iterator(Node *n, int p)
770 : node(n),
771 position(p) {
772 }
773 btree_iterator(const iterator &x)
774 : node(x.node),
775 position(x.position) {
776 }
777
778 // Increment/decrement the iterator.
779 void increment() {
780 if (node->leaf() && ++position < node->count()) {
781 return;
782 }
783 increment_slow();
784 }
785 void increment_by(int count);
786 void increment_slow();
787
788 void decrement() {
789 if (node->leaf() && --position >= 0) {
790 return;
791 }
792 decrement_slow();
793 }
794 void decrement_slow();
795
796 bool operator==(const const_iterator &x) const {
797 return node == x.node && position == x.position;
798 }
799 bool operator!=(const const_iterator &x) const {
800 return node != x.node || position != x.position;
801 }
802
803 // Accessors for the key/value the iterator is pointing at.
804 const key_type& key() const {
805 return node->key(position);
806 }
807 reference operator*() const {
808 return node->value(position);
809 }
810 pointer operator->() const {
811 return &node->value(position);
812 }
813
814 self_type& operator++() {
815 increment();
816 return *this;
817 }
818 self_type& operator--() {
819 decrement();
820 return *this;
821 }
822 self_type operator++(int) {
823 self_type tmp = *this;
824 ++*this;
825 return tmp;
826 }
827 self_type operator--(int) {
828 self_type tmp = *this;
829 --*this;
830 return tmp;
831 }
832
833 // The node in the tree the iterator is pointing at.
834 Node *node;
835 // The position within the node of the tree the iterator is pointing at.
836 int position;
837 };
838
839 // Dispatch helper class for using btree::internal_locate with plain compare.
840 struct btree_internal_locate_plain_compare {
841 template <typename K, typename T, typename Iter>
842 static std::pair<Iter, int> dispatch(const K &k, const T &t, Iter iter) {
843 return t.internal_locate_plain_compare(k, iter);
844 }
845 };
846
847 // Dispatch helper class for using btree::internal_locate with compare-to.
848 struct btree_internal_locate_compare_to {
849 template <typename K, typename T, typename Iter>
850 static std::pair<Iter, int> dispatch(const K &k, const T &t, Iter iter) {
851 return t.internal_locate_compare_to(k, iter);
852 }
853 };
854
855 template <typename Params>
856 class btree : public Params::key_compare {
857 typedef btree<Params> self_type;
858 typedef btree_node<Params> node_type;
859 typedef typename node_type::base_fields base_fields;
860 typedef typename node_type::leaf_fields leaf_fields;
861 typedef typename node_type::internal_fields internal_fields;
862 typedef typename node_type::root_fields root_fields;
863 typedef typename Params::is_key_compare_to is_key_compare_to;
864
865 friend class btree_internal_locate_plain_compare;
866 friend class btree_internal_locate_compare_to;
867 typedef typename if_<
868 is_key_compare_to::value,
869 btree_internal_locate_compare_to,
870 btree_internal_locate_plain_compare>::type internal_locate_type;
871
872 enum {
873 kNodeValues = node_type::kNodeValues,
874 kMinNodeValues = kNodeValues / 2,
875 kValueSize = node_type::kValueSize,
876 kExactMatch = node_type::kExactMatch,
877 kMatchMask = node_type::kMatchMask,
878 };
879
880 // A helper class to get the empty base class optimization for 0-size
881 // allocators. Base is internal_allocator_type.
882 // (e.g. empty_base_handle<internal_allocator_type, node_type*>). If Base is
883 // 0-size, the compiler doesn't have to reserve any space for it and
884 // sizeof(empty_base_handle) will simply be sizeof(Data). Google [empty base
885 // class optimization] for more details.
886 template <typename Base, typename Data>
887 struct empty_base_handle : public Base {
888 empty_base_handle(const Base &b, const Data &d)
889 : Base(b),
890 data(d) {
891 }
892 Data data;
893 };
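// For illustration (a sketch, not asserted anywhere in this file): with a
// stateless allocator such as std::allocator<char>, the empty base class
// optimization means that
//
//   sizeof(empty_base_handle<std::allocator<char>, btree_node<Params>*>)
//
// is typically just sizeof(btree_node<Params>*), whereas holding the
// allocator as a data member would cost at least one extra (padded) byte.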
894
895 struct node_stats {
896 node_stats(ssize_t l, ssize_t i)
897 : leaf_nodes(l),
898 internal_nodes(i) {
899 }
900
901 node_stats& operator+=(const node_stats &x) {
902 leaf_nodes += x.leaf_nodes;
903 internal_nodes += x.internal_nodes;
904 return *this;
905 }
906
907 ssize_t leaf_nodes;
908 ssize_t internal_nodes;
909 };
910
911 public:
912 typedef Params params_type;
913 typedef typename Params::key_type key_type;
914 typedef typename Params::data_type data_type;
915 typedef typename Params::mapped_type mapped_type;
916 typedef typename Params::value_type value_type;
917 typedef typename Params::key_compare key_compare;
918 typedef typename Params::pointer pointer;
919 typedef typename Params::const_pointer const_pointer;
920 typedef typename Params::reference reference;
921 typedef typename Params::const_reference const_reference;
922 typedef typename Params::size_type size_type;
923 typedef typename Params::difference_type difference_type;
924 typedef btree_iterator<node_type, reference, pointer> iterator;
925 typedef typename iterator::const_iterator const_iterator;
926 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
927 typedef std::reverse_iterator<iterator> reverse_iterator;
928
929 typedef typename Params::allocator_type allocator_type;
930 typedef typename allocator_type::template rebind<char>::other
931 internal_allocator_type;
932
933 public:
934 // Default constructor.
935 btree(const key_compare &comp, const allocator_type &alloc);
936
937 // Copy constructor.
938 btree(const self_type &x);
939
940 // Destructor.
941 ~btree() {
942 clear();
943 }
944
945 // Iterator routines.
946 iterator begin() {
947 return iterator(leftmost(), 0);
948 }
949 const_iterator begin() const {
950 return const_iterator(leftmost(), 0);
951 }
952 iterator end() {
953 return iterator(rightmost(), rightmost() ? rightmost()->count() : 0);
954 }
955 const_iterator end() const {
956 return const_iterator(rightmost(), rightmost() ? rightmost()->count() : 0);
957 }
958 reverse_iterator rbegin() {
959 return reverse_iterator(end());
960 }
961 const_reverse_iterator rbegin() const {
962 return const_reverse_iterator(end());
963 }
964 reverse_iterator rend() {
965 return reverse_iterator(begin());
966 }
967 const_reverse_iterator rend() const {
968 return const_reverse_iterator(begin());
969 }
970
971 // Finds the first element whose key is not less than key.
972 iterator lower_bound(const key_type &key) {
973 return internal_end(
974 internal_lower_bound(key, iterator(root(), 0)));
975 }
976 const_iterator lower_bound(const key_type &key) const {
977 return internal_end(
978 internal_lower_bound(key, const_iterator(root(), 0)));
979 }
980
981 // Finds the first element whose key is greater than key.
982 iterator upper_bound(const key_type &key) {
983 return internal_end(
984 internal_upper_bound(key, iterator(root(), 0)));
985 }
986 const_iterator upper_bound(const key_type &key) const {
987 return internal_end(
988 internal_upper_bound(key, const_iterator(root(), 0)));
989 }
990
991 // Finds the range of values which compare equal to key. The first member of
992 // the returned pair is equal to lower_bound(key). The second member of
993 // the pair is equal to upper_bound(key).
994 std::pair<iterator,iterator> equal_range(const key_type &key) {
995 return std::make_pair(lower_bound(key), upper_bound(key));
996 }
997 std::pair<const_iterator,const_iterator> equal_range(const key_type &key) const {
998 return std::make_pair(lower_bound(key), upper_bound(key));
999 }
1000
1001 // Inserts a value into the btree only if it does not already exist. The
1002 // boolean return value indicates whether insertion succeeded or failed. The
1003 // ValuePointer type is used to avoid instantiating the value unless the key
1004 // is being inserted. Value is not dereferenced if the key already exists in
1005 // the btree. See btree_map::operator[].
1006 template <typename ValuePointer>
1007 std::pair<iterator,bool> insert_unique(const key_type &key, ValuePointer value);
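// Illustrative sketch only (not the actual btree_map implementation): a
// ValuePointer may be any pointer-like object, so a caller can defer
// constructing the value until the key is known to be absent. E.g. a
// hypothetical lazy generator for a map:
//
//   struct lazy_value {
//     const key_type &key;
//     value_type operator*() const { return value_type(key, data_type()); }
//   };
//   // tree.insert_unique(k, lazy_value{k}) builds the mapped value only
//   // when k is not already present in the tree.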
1008
1009 // Inserts a value into the btree only if it does not already exist. The
1010 // boolean return value indicates whether insertion succeeded or failed.
1011 std::pair<iterator,bool> insert_unique(const value_type &v) {
1012 return insert_unique(params_type::key(v), &v);
1013 }
1014
1015 // Insert with hint. Check to see if the value should be placed immediately
1016 // before position in the tree. If so, then the insertion will take
1017 // amortized constant time. If not, the insertion will take amortized
1018 // logarithmic time as if a call to insert_unique(v) were made.
1019 iterator insert_unique(iterator position, const value_type &v);
1020
1021 // Insert a range of values into the btree.
1022 template <typename InputIterator>
1023 void insert_unique(InputIterator b, InputIterator e);
1024
1025 // Inserts a value into the btree. The ValuePointer type is used to avoid
1026 // instantiating the value unless the key is being inserted. Value is not
1027 // dereferenced if the key already exists in the btree. See
1028 // btree_map::operator[].
1029 template <typename ValuePointer>
1030 iterator insert_multi(const key_type &key, ValuePointer value);
1031
1032 // Inserts a value into the btree.
1033 iterator insert_multi(const value_type &v) {
1034 return insert_multi(params_type::key(v), &v);
1035 }
1036
1037 // Insert with hint. Check to see if the value should be placed immediately
1038 // before position in the tree. If so, then the insertion will take
1039 // amortized constant time. If not, the insertion will take amortized
1040 // logarithmic time as if a call to insert_multi(v) were made.
1041 iterator insert_multi(iterator position, const value_type &v);
1042
1043 // Insert a range of values into the btree.
1044 template <typename InputIterator>
1045 void insert_multi(InputIterator b, InputIterator e);
1046
1047 void assign(const self_type &x);
1048
1049 // Erase the specified iterator from the btree. The iterator must be valid
1050 // (i.e. not equal to end()). Return an iterator pointing to the element after
1051 // the one that was erased (or end() if none exists).
1052 iterator erase(iterator iter);
1053
1054 // Erases range. Returns the number of keys erased.
1055 int erase(iterator begin, iterator end);
1056
1057 // Erases the specified key from the btree. Returns 1 if an element was
1058 // erased and 0 otherwise.
1059 int erase_unique(const key_type &key);
1060
1061 // Erases all of the entries matching the specified key from the
1062 // btree. Returns the number of elements erased.
1063 int erase_multi(const key_type &key);
1064
1065 // Finds the iterator corresponding to a key or returns end() if the key is
1066 // not present.
1067 iterator find_unique(const key_type &key) {
1068 return internal_end(
1069 internal_find_unique(key, iterator(root(), 0)));
1070 }
1071 const_iterator find_unique(const key_type &key) const {
1072 return internal_end(
1073 internal_find_unique(key, const_iterator(root(), 0)));
1074 }
1075 iterator find_multi(const key_type &key) {
1076 return internal_end(
1077 internal_find_multi(key, iterator(root(), 0)));
1078 }
1079 const_iterator find_multi(const key_type &key) const {
1080 return internal_end(
1081 internal_find_multi(key, const_iterator(root(), 0)));
1082 }
1083
1084 // Returns a count of the number of times the key appears in the btree.
1085 size_type count_unique(const key_type &key) const {
1086 const_iterator begin = internal_find_unique(
1087 key, const_iterator(root(), 0));
1088 if (!begin.node) {
1089 // The key doesn't exist in the tree.
1090 return 0;
1091 }
1092 return 1;
1093 }
1094 // Returns a count of the number of times the key appears in the btree.
1095 size_type count_multi(const key_type &key) const {
1096 return distance(lower_bound(key), upper_bound(key));
1097 }
1098
1099 // Clear the btree, deleting all of the values it contains.
1100 void clear();
1101
1102 // Swap the contents of *this and x.
1103 void swap(self_type &x);
1104
1105 // Assign the contents of x to *this.
1106 self_type& operator=(const self_type &x) {
1107 if (&x == this) {
1108 // Don't copy onto ourselves.
1109 return *this;
1110 }
1111 assign(x);
1112 return *this;
1113 }
1114
1115 key_compare* mutable_key_comp() {
1116 return this;
1117 }
1118 const key_compare& key_comp() const {
1119 return *this;
1120 }
1121 bool compare_keys(const key_type &x, const key_type &y) const {
1122 return btree_compare_keys(key_comp(), x, y);
1123 }
1124
1125 // Dump the btree to the specified ostream. Requires that operator<< is
1126 // defined for Key and Value.
1127 void dump(std::ostream &os) const {
1128 if (root() != NULL) {
1129 internal_dump(os, root(), 0);
1130 }
1131 }
1132
1133 // Verifies the structure of the btree.
1134 void verify() const;
1135
1136 // Size routines. Note that empty() is slightly faster than doing size()==0.
1137 size_type size() const {
1138 if (empty()) return 0;
1139 if (root()->leaf()) return root()->count();
1140 return root()->size();
1141 }
1142 size_type max_size() const { return std::numeric_limits<size_type>::max(); }
1143 bool empty() const { return root() == NULL; }
1144
1145 // The height of the btree. An empty tree will have height 0.
1146 size_type height() const {
1147 size_type h = 0;
1148 if (root()) {
1149 // Count the length of the chain from the leftmost node up to the
1150 // root. We actually count from the root back around to the level below
1151 // the root, but the calculation is the same because of the circularity
1152 // of that traversal.
1153 const node_type *n = root();
1154 do {
1155 ++h;
1156 n = n->parent();
1157 } while (n != root());
1158 }
1159 return h;
1160 }
1161
1162 // The number of internal, leaf and total nodes used by the btree.
1163 size_type leaf_nodes() const {
1164 return internal_stats(root()).leaf_nodes;
1165 }
1166 size_type internal_nodes() const {
1167 return internal_stats(root()).internal_nodes;
1168 }
1169 size_type nodes() const {
1170 node_stats stats = internal_stats(root());
1171 return stats.leaf_nodes + stats.internal_nodes;
1172 }
1173
1174 // The total number of bytes used by the btree.
1175 size_type bytes_used() const {
1176 node_stats stats = internal_stats(root());
1177 if (stats.leaf_nodes == 1 && stats.internal_nodes == 0) {
1178 return sizeof(*this) +
1179 sizeof(base_fields) + root()->max_count() * sizeof(value_type);
1180 } else {
1181 return sizeof(*this) +
1182 sizeof(root_fields) - sizeof(internal_fields) +
1183 stats.leaf_nodes * sizeof(leaf_fields) +
1184 stats.internal_nodes * sizeof(internal_fields);
1185 }
1186 }
1187
1188 // The average number of bytes used per value stored in the btree.
1189 static double average_bytes_per_value() {
1190 // Returns the number of bytes per value on a leaf node that is 75%
1191 // full. Experimentally, this matches up nicely with the computed number of
1192 // bytes per value in trees that had their values inserted in random order.
1193 return sizeof(leaf_fields) / (kNodeValues * 0.75);
1194 }
1195
1196 // The fullness of the btree. Computed as the number of elements in the btree
1197 // divided by the maximum number of elements a tree with the current number
1198 // of nodes could hold. A value of 1 indicates perfect space
1199 // utilization. Smaller values indicate space wastage.
1200 double fullness() const {
1201 return double(size()) / (nodes() * kNodeValues);
1202 }
1203 // The overhead of the btree structure in bytes per node. Computed as the
1204 // total number of bytes used by the btree minus the number of bytes used for
1205 // storing elements divided by the number of elements.
1206 double overhead() const {
1207 if (empty()) {
1208 return 0.0;
1209 }
1210 return (bytes_used() - size() * kValueSize) / double(size());
1211 }
1212
1213 private:
1214 // Internal accessor routines.
1215 node_type* root() { return root_.data; }
1216 const node_type* root() const { return root_.data; }
1217 node_type** mutable_root() { return &root_.data; }
1218
1219 // The rightmost node is stored in the root node.
1220 node_type* rightmost() {
1221 return (!root() || root()->leaf()) ? root() : root()->rightmost();
1222 }
1223 const node_type* rightmost() const {
1224 return (!root() || root()->leaf()) ? root() : root()->rightmost();
1225 }
1226 node_type** mutable_rightmost() { return root()->mutable_rightmost(); }
1227
1228 // The leftmost node is stored as the parent of the root node.
1229 node_type* leftmost() { return root() ? root()->parent() : NULL; }
1230 const node_type* leftmost() const { return root() ? root()->parent() : NULL; }
1231
1232 // The size of the tree is stored in the root node.
1233 size_type* mutable_size() { return root()->mutable_size(); }
1234
1235 // Allocator routines.
1236 internal_allocator_type* mutable_internal_allocator() {
1237 return static_cast<internal_allocator_type*>(&root_);
1238 }
1239 const internal_allocator_type& internal_allocator() const {
1240 return *static_cast<const internal_allocator_type*>(&root_);
1241 }
1242
1243 // Node creation/deletion routines.
1244 node_type* new_internal_node(node_type *parent) {
1245 internal_fields *p = reinterpret_cast<internal_fields*>(
1246 mutable_internal_allocator()->allocate(sizeof(internal_fields)));
1247 return node_type::init_internal(p, parent);
1248 }
1249 node_type* new_internal_root_node() {
1250 root_fields *p = reinterpret_cast<root_fields*>(
1251 mutable_internal_allocator()->allocate(sizeof(root_fields)));
1252 return node_type::init_root(p, root()->parent());
1253 }
1254 node_type* new_leaf_node(node_type *parent) {
1255 leaf_fields *p = reinterpret_cast<leaf_fields*>(
1256 mutable_internal_allocator()->allocate(sizeof(leaf_fields)));
1257 return node_type::init_leaf(p, parent, kNodeValues);
1258 }
1259 node_type* new_leaf_root_node(int max_count) {
1260 leaf_fields *p = reinterpret_cast<leaf_fields*>(
1261 mutable_internal_allocator()->allocate(
1262 sizeof(base_fields) + max_count * sizeof(value_type)));
1263 return node_type::init_leaf(p, reinterpret_cast<node_type*>(p), max_count);
1264 }
1265 void delete_internal_node(node_type *node) {
1266 node->destroy();
1267 ceph_assert(node != root());
1268 mutable_internal_allocator()->deallocate(
1269 reinterpret_cast<char*>(node), sizeof(internal_fields));
1270 }
1271 void delete_internal_root_node() {
1272 root()->destroy();
1273 mutable_internal_allocator()->deallocate(
1274 reinterpret_cast<char*>(root()), sizeof(root_fields));
1275 }
1276 void delete_leaf_node(node_type *node) {
1277 node->destroy();
1278 mutable_internal_allocator()->deallocate(
1279 reinterpret_cast<char*>(node),
1280 sizeof(base_fields) + node->max_count() * sizeof(value_type));
1281 }
1282
1283 // Rebalances or splits the node iter points to.
1284 void rebalance_or_split(iterator *iter);
1285
1286 // Merges the values of left, right and the delimiting key on their parent
1287 // onto left, removing the delimiting key and deleting right.
1288 void merge_nodes(node_type *left, node_type *right);
1289
1290 // Tries to merge node with its left or right sibling, and failing that,
1291 // rebalance with its left or right sibling. Returns true if a merge
1292 // occurred, at which point it is no longer valid to access node. Returns
1293 // false if no merging took place.
1294 bool try_merge_or_rebalance(iterator *iter);
1295
1296 // Tries to shrink the height of the tree by 1.
1297 void try_shrink();
1298
1299 iterator internal_end(iterator iter) {
1300 return iter.node ? iter : end();
1301 }
1302 const_iterator internal_end(const_iterator iter) const {
1303 return iter.node ? iter : end();
1304 }
1305
1306 // Inserts a value into the btree immediately before iter. Requires that
1307 // key(v) <= iter.key() and (--iter).key() <= key(v).
1308 iterator internal_insert(iterator iter, const value_type &v);
1309
1310 // Returns an iterator pointing to the first value >= the value "iter" is
1311 // pointing at. Note that "iter" might be pointing to an invalid location as
1312 // iter.position == iter.node->count(). This routine simply moves iter up in
1313 // the tree to a valid location.
1314 template <typename IterType>
1315 static IterType internal_last(IterType iter);
1316
1317 // Returns an iterator pointing to the leaf position at which key would
1318 // reside in the tree. We provide 2 versions of internal_locate. The first
1319 // version (internal_locate_plain_compare) always returns 0 for the second
1320 // field of the pair. The second version (internal_locate_compare_to) is for
1321 // the key-compare-to specialization and returns either kExactMatch (if the
1322 // key was found in the tree) or -kExactMatch (if it wasn't) in the second
1323 // field of the pair. The compare_to specialization allows the caller to
1324 // avoid a subsequent comparison to determine if an exact match was made,
1325 // speeding up string keys.
1326 template <typename IterType>
1327 std::pair<IterType, int> internal_locate(
1328 const key_type &key, IterType iter) const;
1329 template <typename IterType>
1330 std::pair<IterType, int> internal_locate_plain_compare(
1331 const key_type &key, IterType iter) const;
1332 template <typename IterType>
1333 std::pair<IterType, int> internal_locate_compare_to(
1334 const key_type &key, IterType iter) const;
1335
1336 // Internal routine which implements lower_bound().
1337 template <typename IterType>
1338 IterType internal_lower_bound(
1339 const key_type &key, IterType iter) const;
1340
1341 // Internal routine which implements upper_bound().
1342 template <typename IterType>
1343 IterType internal_upper_bound(
1344 const key_type &key, IterType iter) const;
1345
1346 // Internal routine which implements find_unique().
1347 template <typename IterType>
1348 IterType internal_find_unique(
1349 const key_type &key, IterType iter) const;
1350
1351 // Internal routine which implements find_multi().
1352 template <typename IterType>
1353 IterType internal_find_multi(
1354 const key_type &key, IterType iter) const;
1355
1356 // Deletes a node and all of its children.
1357 void internal_clear(node_type *node);
1358
1359 // Dumps a node and all of its children to the specified ostream.
1360 void internal_dump(std::ostream &os, const node_type *node, int level) const;
1361
1362 // Verifies the tree structure of node.
1363 int internal_verify(const node_type *node,
1364 const key_type *lo, const key_type *hi) const;
1365
1366 node_stats internal_stats(const node_type *node) const {
1367 if (!node) {
1368 return node_stats(0, 0);
1369 }
1370 if (node->leaf()) {
1371 return node_stats(1, 0);
1372 }
1373 node_stats res(0, 1);
1374 for (int i = 0; i <= node->count(); ++i) {
1375 res += internal_stats(node->child(i));
1376 }
1377 return res;
1378 }
1379
1380 private:
1381 empty_base_handle<internal_allocator_type, node_type*> root_;
1382
1383 private:
1384 // A never-instantiated helper function that returns big_ if R is the result
1385 // type expected of the comparator (int for key-compare-to, bool otherwise), and small_ if not.
1386 template <typename R>
1387 static typename if_<
1388 if_<is_key_compare_to::value,
1389 std::is_same<R, int>,
1390 std::is_same<R, bool> >::type::value,
1391 big_, small_>::type key_compare_checker(R);
1392
1393 // A never instantiated helper function that returns the key comparison
1394 // functor.
1395 static key_compare key_compare_helper();
1396
1397 // Verify that key_compare returns a bool (or an int for key-compare-to). This is similar to the way
1398 // is_convertible in base/type_traits.h works. Note that key_compare_checker
1399 // is never actually invoked. The compiler will select which
1400 // key_compare_checker() to instantiate and then figure out the size of the
1401 // return type of key_compare_checker() at compile time which we then check
1402 // against the sizeof of big_.
1403 COMPILE_ASSERT(
1404 sizeof(key_compare_checker(key_compare_helper()(key_type(), key_type()))) ==
1405 sizeof(big_),
1406 key_comparison_function_must_return_bool);
1407
1408 // Note: We insist that kNodeValues, which is computed from
1409 // Params::kTargetNodeSize, fit in base_fields::field_type.
1410 COMPILE_ASSERT(kNodeValues <
1411 (1 << (8 * sizeof(typename base_fields::field_type))),
1412 target_node_size_too_large);
1413
1414 // Test the assumption made in setting kNodeValueSpace.
1415 COMPILE_ASSERT(sizeof(base_fields) >= 2 * sizeof(void*),
1416 node_space_assumption_incorrect);
1417 };
1418
1419 ////
1420 // btree_node methods
1421 template <typename P>
1422 inline void btree_node<P>::insert_value(int i, const value_type &x) {
1423 ceph_assert(i <= count());
1424 value_init(count(), x);
1425 for (int j = count(); j > i; --j) {
1426 value_swap(j, this, j - 1);
1427 }
1428 set_count(count() + 1);
1429
1430 if (!leaf()) {
1431 ++i;
1432 for (int j = count(); j > i; --j) {
1433 *mutable_child(j) = child(j - 1);
1434 child(j)->set_position(j);
1435 }
1436 *mutable_child(i) = NULL;
1437 }
1438 }
1439
1440 template <typename P>
1441 inline void btree_node<P>::remove_value(int i) {
1442 if (!leaf()) {
1443 ceph_assert(child(i + 1)->count() == 0);
1444 for (int j = i + 1; j < count(); ++j) {
1445 *mutable_child(j) = child(j + 1);
1446 child(j)->set_position(j);
1447 }
1448 *mutable_child(count()) = NULL;
1449 }
1450
1451 set_count(count() - 1);
1452 for (; i < count(); ++i) {
1453 value_swap(i, this, i + 1);
1454 }
1455 value_destroy(i);
1456 }
1457
1458 template <typename P>
1459 void btree_node<P>::rebalance_right_to_left(btree_node *src, int to_move) {
1460 ceph_assert(parent() == src->parent());
1461 ceph_assert(position() + 1 == src->position());
1462 ceph_assert(src->count() >= count());
1463 ceph_assert(to_move >= 1);
1464 ceph_assert(to_move <= src->count());
1465
1466 // Make room in the left node for the new values.
1467 for (int i = 0; i < to_move; ++i) {
1468 value_init(i + count());
1469 }
1470
1471 // Move the delimiting value to the left node and the new delimiting value
1472 // from the right node.
1473 value_swap(count(), parent(), position());
1474 parent()->value_swap(position(), src, to_move - 1);
1475
1476 // Move the values from the right to the left node.
1477 for (int i = 1; i < to_move; ++i) {
1478 value_swap(count() + i, src, i - 1);
1479 }
1480 // Shift the values in the right node to their correct position.
1481 for (int i = to_move; i < src->count(); ++i) {
1482 src->value_swap(i - to_move, src, i);
1483 }
1484 for (int i = 1; i <= to_move; ++i) {
1485 src->value_destroy(src->count() - i);
1486 }
1487
1488 if (!leaf()) {
1489 // Move the child pointers from the right to the left node.
1490 for (int i = 0; i < to_move; ++i) {
1491 set_child(1 + count() + i, src->child(i));
1492 }
1493 for (int i = 0; i <= src->count() - to_move; ++i) {
1494 ceph_assert(i + to_move <= src->max_count());
1495 src->set_child(i, src->child(i + to_move));
1496 *src->mutable_child(i + to_move) = NULL;
1497 }
1498 }
1499
1500 // Fixup the counts on the src and dest nodes.
1501 set_count(count() + to_move);
1502 src->set_count(src->count() - to_move);
1503 }
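// Illustrative example (not part of the implementation): with a parent
// delimiter of 50, a left node of [10, 20], a right node (src) of
// [60, 70, 80, 90] and to_move == 2:
//   - the parent's 50 drops into the left node, and src's 70 (index
//     to_move - 1) becomes the new delimiter in the parent;
//   - src's 60 moves into the left node, and 80, 90 shift down in src.
// The result is left == [10, 20, 50, 60], delimiter == 70, src == [80, 90],
// which preserves the ordering invariant left < delimiter < src.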
1504
1505 template <typename P>
1506 void btree_node<P>::rebalance_left_to_right(btree_node *dest, int to_move) {
1507 ceph_assert(parent() == dest->parent());
1508 ceph_assert(position() + 1 == dest->position());
1509 ceph_assert(count() >= dest->count());
1510 ceph_assert(to_move >= 1);
1511 ceph_assert(to_move <= count());
1512
1513 // Make room in the right node for the new values.
1514 for (int i = 0; i < to_move; ++i) {
1515 dest->value_init(i + dest->count());
1516 }
1517 for (int i = dest->count() - 1; i >= 0; --i) {
1518 dest->value_swap(i, dest, i + to_move);
1519 }
1520
1521 // Move the delimiting value to the right node and the new delimiting value
1522 // from the left node.
1523 dest->value_swap(to_move - 1, parent(), position());
1524 parent()->value_swap(position(), this, count() - to_move);
1525 value_destroy(count() - to_move);
1526
1527 // Move the values from the left to the right node.
1528 for (int i = 1; i < to_move; ++i) {
1529 value_swap(count() - to_move + i, dest, i - 1);
1530 value_destroy(count() - to_move + i);
1531 }
1532
1533 if (!leaf()) {
1534 // Move the child pointers from the left to the right node.
1535 for (int i = dest->count(); i >= 0; --i) {
1536 dest->set_child(i + to_move, dest->child(i));
1537 *dest->mutable_child(i) = NULL;
1538 }
1539 for (int i = 1; i <= to_move; ++i) {
1540 dest->set_child(i - 1, child(count() - to_move + i));
1541 *mutable_child(count() - to_move + i) = NULL;
1542 }
1543 }
1544
1545 // Fixup the counts on the src and dest nodes.
1546 set_count(count() - to_move);
1547 dest->set_count(dest->count() + to_move);
1548 }
1549
1550 template <typename P>
1551 void btree_node<P>::split(btree_node *dest, int insert_position) {
1552 ceph_assert(dest->count() == 0);
1553
1554 // We bias the split based on the position being inserted. If we're
1555 // inserting at the beginning of the left node then bias the split to put
1556 // more values on the right node. If we're inserting at the end of the
1557 // right node then bias the split to put more values on the left node.
1558 if (insert_position == 0) {
1559 dest->set_count(count() - 1);
1560 } else if (insert_position == max_count()) {
1561 dest->set_count(0);
1562 } else {
1563 dest->set_count(count() / 2);
1564 }
1565 set_count(count() - dest->count());
1566 ceph_assert(count() >= 1);
1567
1568 // Move values from the left sibling to the right sibling.
1569 for (int i = 0; i < dest->count(); ++i) {
1570 dest->value_init(i);
1571 value_swap(count() + i, dest, i);
1572 value_destroy(count() + i);
1573 }
1574
1575 // The split key is the largest value in the left sibling.
1576 set_count(count() - 1);
1577 parent()->insert_value(position(), value_type());
1578 value_swap(count(), parent(), position());
1579 value_destroy(count());
1580 parent()->set_child(position() + 1, dest);
1581
1582 if (!leaf()) {
1583 for (int i = 0; i <= dest->count(); ++i) {
1584 ceph_assert(child(count() + i + 1) != NULL);
1585 dest->set_child(i, child(count() + i + 1));
1586 *mutable_child(count() + i + 1) = NULL;
1587 }
1588 }
1589 }
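// Illustrative example (not part of the implementation): splitting a full
// leaf [10, 20, 30, 40, 50, 60] for an insertion in the middle
// (insert_position == 3) gives dest->count() == count() / 2 == 3, so
// 40, 50, 60 move to dest, 30 becomes the new delimiting value in the
// parent, and the original node keeps [10, 20]. Inserting at position 0 or
// at max_count() instead biases the split so that nearly all values end up
// on the side away from the insertion point.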
1590
1591 template <typename P>
1592 void btree_node<P>::merge(btree_node *src) {
1593 ceph_assert(parent() == src->parent());
1594 ceph_assert(position() + 1 == src->position());
1595
1596 // Move the delimiting value to the left node.
1597 value_init(count());
1598 value_swap(count(), parent(), position());
1599
1600 // Move the values from the right to the left node.
1601 for (int i = 0; i < src->count(); ++i) {
1602 value_init(1 + count() + i);
1603 value_swap(1 + count() + i, src, i);
1604 src->value_destroy(i);
1605 }
1606
1607 if (!leaf()) {
1608 // Move the child pointers from the right to the left node.
1609 for (int i = 0; i <= src->count(); ++i) {
1610 set_child(1 + count() + i, src->child(i));
1611 *src->mutable_child(i) = NULL;
1612 }
1613 }
1614
1615 // Fixup the counts on the src and dest nodes.
1616 set_count(1 + count() + src->count());
1617 src->set_count(0);
1618
1619 // Remove the value on the parent node.
1620 parent()->remove_value(position());
1621 }
1622
1623 template <typename P>
1624 void btree_node<P>::swap(btree_node *x) {
1625 ceph_assert(leaf() == x->leaf());
1626
1627 // Swap the values.
1628 for (int i = count(); i < x->count(); ++i) {
1629 value_init(i);
1630 }
1631 for (int i = x->count(); i < count(); ++i) {
1632 x->value_init(i);
1633 }
1634 int n = std::max(count(), x->count());
1635 for (int i = 0; i < n; ++i) {
1636 value_swap(i, x, i);
1637 }
1638 for (int i = count(); i < x->count(); ++i) {
1639 x->value_destroy(i);
1640 }
1641 for (int i = x->count(); i < count(); ++i) {
1642 value_destroy(i);
1643 }
1644
1645 if (!leaf()) {
1646 // Swap the child pointers.
1647 for (int i = 0; i <= n; ++i) {
1648 btree_swap_helper(*mutable_child(i), *x->mutable_child(i));
1649 }
1650 for (int i = 0; i <= count(); ++i) {
1651 x->child(i)->fields_.parent = x;
1652 }
1653 for (int i = 0; i <= x->count(); ++i) {
1654 child(i)->fields_.parent = this;
1655 }
1656 }
1657
1658 // Swap the counts.
1659 btree_swap_helper(fields_.count, x->fields_.count);
1660 }
1661
1662 ////
1663 // btree_iterator methods
1664 template <typename N, typename R, typename P>
1665 void btree_iterator<N, R, P>::increment_slow() {
1666 if (node->leaf()) {
1667 ceph_assert(position >= node->count());
1668 self_type save(*this);
1669 while (position == node->count() && !node->is_root()) {
1670 ceph_assert(node->parent()->child(node->position()) == node);
1671 position = node->position();
1672 node = node->parent();
1673 }
1674 if (position == node->count()) {
1675 *this = save;
1676 }
1677 } else {
1678 ceph_assert(position < node->count());
1679 node = node->child(position + 1);
1680 while (!node->leaf()) {
1681 node = node->child(0);
1682 }
1683 position = 0;
1684 }
1685 }
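// Illustrative example (not part of the implementation): consider a root
// holding [20] with leaf children [10] and [30]. Incrementing an iterator
// sitting past the last value of the leaf [10] walks up to the root, where
// position 0 is no longer past node->count(), so the iterator lands on the
// root's value 20. The next increment takes the internal-node branch:
// descend into child(1) and then to its leftmost leaf, yielding 30. Walking
// off the rightmost leaf instead restores the saved iterator, i.e. end().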
1686
1687 template <typename N, typename R, typename P>
1688 void btree_iterator<N, R, P>::increment_by(int count) {
1689 while (count > 0) {
1690 if (node->leaf()) {
1691 int rest = node->count() - position;
1692 position += std::min(rest, count);
1693 count = count - rest;
1694 if (position < node->count()) {
1695 return;
1696 }
1697 } else {
1698 --count;
1699 }
1700 increment_slow();
1701 }
1702 }
1703
1704 template <typename N, typename R, typename P>
1705 void btree_iterator<N, R, P>::decrement_slow() {
1706 if (node->leaf()) {
1707 ceph_assert(position <= -1);
1708 self_type save(*this);
1709 while (position < 0 && !node->is_root()) {
1710 ceph_assert(node->parent()->child(node->position()) == node);
1711 position = node->position() - 1;
1712 node = node->parent();
1713 }
1714 if (position < 0) {
1715 *this = save;
1716 }
1717 } else {
1718 ceph_assert(position >= 0);
1719 node = node->child(position);
1720 while (!node->leaf()) {
1721 node = node->child(node->count());
1722 }
1723 position = node->count() - 1;
1724 }
1725 }
1726
1727 ////
1728 // btree methods
1729 template <typename P>
1730 btree<P>::btree(const key_compare &comp, const allocator_type &alloc)
1731 : key_compare(comp),
1732 root_(alloc, NULL) {
1733 }
1734
1735 template <typename P>
1736 btree<P>::btree(const self_type &x)
1737 : key_compare(x.key_comp()),
1738 root_(x.internal_allocator(), NULL) {
1739 assign(x);
1740 }
1741
1742 template <typename P> template <typename ValuePointer>
1743 std::pair<typename btree<P>::iterator, bool>
1744 btree<P>::insert_unique(const key_type &key, ValuePointer value) {
1745 if (empty()) {
1746 *mutable_root() = new_leaf_root_node(1);
1747 }
1748
1749 std::pair<iterator, int> res = internal_locate(key, iterator(root(), 0));
1750 iterator &iter = res.first;
1751 if (res.second == kExactMatch) {
1752 // The key already exists in the tree, do nothing.
1753 return std::make_pair(internal_last(iter), false);
1754 } else if (!res.second) {
1755 iterator last = internal_last(iter);
1756 if (last.node && !compare_keys(key, last.key())) {
1757 // The key already exists in the tree, do nothing.
1758 return std::make_pair(last, false);
1759 }
1760 }
1761
1762 return std::make_pair(internal_insert(iter, *value), true);
1763 }
1764
1765 template <typename P>
1766 inline typename btree<P>::iterator
1767 btree<P>::insert_unique(iterator position, const value_type &v) {
1768 if (!empty()) {
1769 const key_type &key = params_type::key(v);
1770 if (position == end() || compare_keys(key, position.key())) {
1771 iterator prev = position;
1772 if (position == begin() || compare_keys((--prev).key(), key)) {
1773 // prev.key() < key < position.key()
1774 return internal_insert(position, v);
1775 }
1776 } else if (compare_keys(position.key(), key)) {
1777 iterator next = position;
1778 ++next;
1779 if (next == end() || compare_keys(key, next.key())) {
1780 // position.key() < key < next.key()
1781 return internal_insert(next, v);
1782 }
1783 } else {
1784 // position.key() == key
1785 return position;
1786 }
1787 }
1788 return insert_unique(v).first;
1789 }
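// Illustrative usage (not part of this header), assuming the container
// wrappers forward their hinted insert to insert_unique() and return an
// iterator as in the STL. Inserting pre-sorted keys with the previous
// result as the hint keeps each insertion on the constant-comparison hinted
// path above:
//
//   btree::btree_set<int> s;
//   btree::btree_set<int>::iterator hint = s.end();
//   for (int i = 0; i < 1000; ++i) {
//     hint = s.insert(hint, i);   // only a couple of key comparisons each
//   }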
1790
1791 template <typename P> template <typename InputIterator>
1792 void btree<P>::insert_unique(InputIterator b, InputIterator e) {
1793 for (; b != e; ++b) {
1794 insert_unique(end(), *b);
1795 }
1796 }
1797
1798 template <typename P> template <typename ValuePointer>
1799 typename btree<P>::iterator
1800 btree<P>::insert_multi(const key_type &key, ValuePointer value) {
1801 if (empty()) {
1802 *mutable_root() = new_leaf_root_node(1);
1803 }
1804
1805 iterator iter = internal_upper_bound(key, iterator(root(), 0));
1806 if (!iter.node) {
1807 iter = end();
1808 }
1809 return internal_insert(iter, *value);
1810 }
1811
1812 template <typename P>
1813 typename btree<P>::iterator
1814 btree<P>::insert_multi(iterator position, const value_type &v) {
1815 if (!empty()) {
1816 const key_type &key = params_type::key(v);
1817 if (position == end() || !compare_keys(position.key(), key)) {
1818 iterator prev = position;
1819 if (position == begin() || !compare_keys(key, (--prev).key())) {
1820 // prev.key() <= key <= position.key()
1821 return internal_insert(position, v);
1822 }
1823 } else {
1824 iterator next = position;
1825 ++next;
1826 if (next == end() || !compare_keys(next.key(), key)) {
1827 // position.key() < key <= next.key()
1828 return internal_insert(next, v);
1829 }
1830 }
1831 }
1832 return insert_multi(v);
1833 }
1834
1835 template <typename P> template <typename InputIterator>
1836 void btree<P>::insert_multi(InputIterator b, InputIterator e) {
1837 for (; b != e; ++b) {
1838 insert_multi(end(), *b);
1839 }
1840 }
1841
1842 template <typename P>
1843 void btree<P>::assign(const self_type &x) {
1844 clear();
1845
1846 *mutable_key_comp() = x.key_comp();
1847 *mutable_internal_allocator() = x.internal_allocator();
1848
1849 // Assignment can avoid key comparisons because we know the order of the
1850 // values is the same order we'll store them in.
1851 for (const_iterator iter = x.begin(); iter != x.end(); ++iter) {
1852 if (empty()) {
1853 insert_multi(*iter);
1854 } else {
1855 // If the btree is not empty, we can just insert the new value at the end
1856 // of the tree!
1857 internal_insert(end(), *iter);
1858 }
1859 }
1860 }
1861
1862 template <typename P>
1863 typename btree<P>::iterator btree<P>::erase(iterator iter) {
1864 bool internal_delete = false;
1865 if (!iter.node->leaf()) {
1866 // Deletion of a value on an internal node. Swap the key with the largest
1867 // value of our left child. This is easy, we just decrement iter.
1868 iterator tmp_iter(iter--);
1869 ceph_assert(iter.node->leaf());
1870 ceph_assert(!compare_keys(tmp_iter.key(), iter.key()));
1871 iter.node->value_swap(iter.position, tmp_iter.node, tmp_iter.position);
1872 internal_delete = true;
1873 --*mutable_size();
1874 } else if (!root()->leaf()) {
1875 --*mutable_size();
1876 }
1877
1878 // Delete the key from the leaf.
1879 iter.node->remove_value(iter.position);
1880
1881 // We want to return the next value after the one we just erased. If we
1882 // erased from an internal node (internal_delete == true), then the next
1883 // value is ++(++iter). If we erased from a leaf node (internal_delete ==
1884 // false) then the next value is ++iter. Note that ++iter may point to an
1885 // internal node and the value in the internal node may move to a leaf node
1886 // (iter.node) when rebalancing is performed at the leaf level.
1887
1888 // Merge/rebalance as we walk back up the tree.
1889 iterator res(iter);
1890 for (;;) {
1891 if (iter.node == root()) {
1892 try_shrink();
1893 if (empty()) {
1894 return end();
1895 }
1896 break;
1897 }
1898 if (iter.node->count() >= kMinNodeValues) {
1899 break;
1900 }
1901 bool merged = try_merge_or_rebalance(&iter);
1902 if (iter.node->leaf()) {
1903 res = iter;
1904 }
1905 if (!merged) {
1906 break;
1907 }
1908 iter.node = iter.node->parent();
1909 }
1910
1911 // Adjust our return value. If we're pointing at the end of a node, advance
1912 // the iterator.
1913 if (res.position == res.node->count()) {
1914 res.position = res.node->count() - 1;
1915 ++res;
1916 }
1917 // If we erased from an internal node, advance the iterator.
1918 if (internal_delete) {
1919 ++res;
1920 }
1921 return res;
1922 }
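// Illustrative usage (not part of this header), assuming the container
// wrapper forwards erase(iterator) and propagates the returned iterator.
// Because the return value already points at the element after the erased
// one, the usual erase-while-iterating loop needs no extra bookkeeping:
//
//   // given some btree::btree_set<int> s populated elsewhere:
//   btree::btree_set<int>::iterator it = s.begin();
//   while (it != s.end()) {
//     if (*it % 2 != 0) {
//       it = s.erase(it);   // erase odd keys; continue from the follower
//     } else {
//       ++it;
//     }
//   }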
1923
1924 template <typename P>
1925 int btree<P>::erase(iterator begin, iterator end) {
1926 int count = distance(begin, end);
1927 for (int i = 0; i < count; i++) {
1928 begin = erase(begin);
1929 }
1930 return count;
1931 }
1932
1933 template <typename P>
1934 int btree<P>::erase_unique(const key_type &key) {
1935 iterator iter = internal_find_unique(key, iterator(root(), 0));
1936 if (!iter.node) {
1937 // The key doesn't exist in the tree; report that nothing was erased.
1938 return 0;
1939 }
1940 erase(iter);
1941 return 1;
1942 }
1943
1944 template <typename P>
1945 int btree<P>::erase_multi(const key_type &key) {
1946 iterator begin = internal_lower_bound(key, iterator(root(), 0));
1947 if (!begin.node) {
1949 // The key doesn't exist in the tree; report that nothing was erased.
1949 return 0;
1950 }
1951 // Delete all of the keys between begin and upper_bound(key).
1952 iterator end = internal_end(
1953 internal_upper_bound(key, iterator(root(), 0)));
1954 return erase(begin, end);
1955 }
1956
1957 template <typename P>
1958 void btree<P>::clear() {
1959 if (root() != NULL) {
1960 internal_clear(root());
1961 }
1962 *mutable_root() = NULL;
1963 }
1964
1965 template <typename P>
1966 void btree<P>::swap(self_type &x) {
1967 std::swap(static_cast<key_compare&>(*this), static_cast<key_compare&>(x));
1968 std::swap(root_, x.root_);
1969 }
1970
1971 template <typename P>
1972 void btree<P>::verify() const {
1973 if (root() != NULL) {
1974 ceph_assert(size() == internal_verify(root(), NULL, NULL));
1975 ceph_assert(leftmost() == (++const_iterator(root(), -1)).node);
1976 ceph_assert(rightmost() == (--const_iterator(root(), root()->count())).node);
1977 ceph_assert(leftmost()->leaf());
1978 ceph_assert(rightmost()->leaf());
1979 } else {
1980 ceph_assert(size() == 0);
1981 ceph_assert(leftmost() == NULL);
1982 ceph_assert(rightmost() == NULL);
1983 }
1984 }
1985
1986 template <typename P>
1987 void btree<P>::rebalance_or_split(iterator *iter) {
1988 node_type *&node = iter->node;
1989 int &insert_position = iter->position;
1990 ceph_assert(node->count() == node->max_count());
1991
1992 // First try to make room on the node by rebalancing.
1993 node_type *parent = node->parent();
1994 if (node != root()) {
1995 if (node->position() > 0) {
1996 // Try rebalancing with our left sibling.
1997 node_type *left = parent->child(node->position() - 1);
1998 if (left->count() < left->max_count()) {
1999 // We bias rebalancing based on the position being inserted. If we're
2000 // inserting at the end of the right node then we bias rebalancing to
2001 // fill up the left node.
2002 int to_move = (left->max_count() - left->count()) /
2003 (1 + (insert_position < left->max_count()));
2004 to_move = std::max(1, to_move);
2005
2006 if (((insert_position - to_move) >= 0) ||
2007 ((left->count() + to_move) < left->max_count())) {
2008 left->rebalance_right_to_left(node, to_move);
2009
2010 ceph_assert(node->max_count() - node->count() == to_move);
2011 insert_position = insert_position - to_move;
2012 if (insert_position < 0) {
2013 insert_position = insert_position + left->count() + 1;
2014 node = left;
2015 }
2016
2017 ceph_assert(node->count() < node->max_count());
2018 return;
2019 }
2020 }
2021 }
2022
2023 if (node->position() < parent->count()) {
2024 // Try rebalancing with our right sibling.
2025 node_type *right = parent->child(node->position() + 1);
2026 if (right->count() < right->max_count()) {
2027 // We bias rebalancing based on the position being inserted. If we're
2028 // inserting at the beginning of the left node then we bias rebalancing
2029 // to fill up the right node.
2030 int to_move = (right->max_count() - right->count()) /
2031 (1 + (insert_position > 0));
2032 to_move = std::max(1, to_move);
2033
2034 if ((insert_position <= (node->count() - to_move)) ||
2035 ((right->count() + to_move) < right->max_count())) {
2036 node->rebalance_left_to_right(right, to_move);
2037
2038 if (insert_position > node->count()) {
2039 insert_position = insert_position - node->count() - 1;
2040 node = right;
2041 }
2042
2043 ceph_assert(node->count() < node->max_count());
2044 return;
2045 }
2046 }
2047 }
2048
2049 // Rebalancing failed, make sure there is room on the parent node for a new
2050 // value.
2051 if (parent->count() == parent->max_count()) {
2052 iterator parent_iter(node->parent(), node->position());
2053 rebalance_or_split(&parent_iter);
2054 }
2055 } else {
2056 // Rebalancing not possible because this is the root node.
2057 if (root()->leaf()) {
2058 // The root node is currently a leaf node: create a new root node and set
2059 // the current root node as the child of the new root.
2060 parent = new_internal_root_node();
2061 parent->set_child(0, root());
2062 *mutable_root() = parent;
2063 ceph_assert(*mutable_rightmost() == parent->child(0));
2064 } else {
2065 // The root node is an internal node. We do not want to create a new root
2066 // node because the root node is special and holds the size of the tree
2067 // and a pointer to the rightmost node. So we create a new internal node
2068 // and move all of the items on the current root into the new node.
2069 parent = new_internal_node(parent);
2070 parent->set_child(0, parent);
2071 parent->swap(root());
2072 node = parent;
2073 }
2074 }
2075
2076 // Split the node.
2077 node_type *split_node;
2078 if (node->leaf()) {
2079 split_node = new_leaf_node(parent);
2080 node->split(split_node, insert_position);
2081 if (rightmost() == node) {
2082 *mutable_rightmost() = split_node;
2083 }
2084 } else {
2085 split_node = new_internal_node(parent);
2086 node->split(split_node, insert_position);
2087 }
2088
2089 if (insert_position > node->count()) {
2090 insert_position = insert_position - node->count() - 1;
2091 node = split_node;
2092 }
2093 }
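// Illustrative example (not part of the implementation) of the rebalancing
// bias above: if the left sibling has 4 free slots, then to_move is
// 4 / 2 == 2 when the insertion position lies anywhere before the end of
// the full node, but all 4 free slots are used (to_move == 4) when we are
// appending at the very end of it, since packing the left sibling as
// tightly as possible in that case delays the next split. The right-sibling
// case is symmetric.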
2094
2095 template <typename P>
2096 void btree<P>::merge_nodes(node_type *left, node_type *right) {
2097 left->merge(right);
2098 if (right->leaf()) {
2099 if (rightmost() == right) {
2100 *mutable_rightmost() = left;
2101 }
2102 delete_leaf_node(right);
2103 } else {
2104 delete_internal_node(right);
2105 }
2106 }
2107
2108 template <typename P>
2109 bool btree<P>::try_merge_or_rebalance(iterator *iter) {
2110 node_type *parent = iter->node->parent();
2111 if (iter->node->position() > 0) {
2112 // Try merging with our left sibling.
2113 node_type *left = parent->child(iter->node->position() - 1);
2114 if ((1 + left->count() + iter->node->count()) <= left->max_count()) {
2115 iter->position += 1 + left->count();
2116 merge_nodes(left, iter->node);
2117 iter->node = left;
2118 return true;
2119 }
2120 }
2121 if (iter->node->position() < parent->count()) {
2122 // Try merging with our right sibling.
2123 node_type *right = parent->child(iter->node->position() + 1);
2124 if ((1 + iter->node->count() + right->count()) <= right->max_count()) {
2125 merge_nodes(iter->node, right);
2126 return true;
2127 }
2128 // Try rebalancing with our right sibling. We don't perform rebalancing if
2129 // we deleted the first element from iter->node and the node is not
2130 // empty. This is a small optimization for the common pattern of deleting
2131 // from the front of the tree.
2132 if ((right->count() > kMinNodeValues) &&
2133 ((iter->node->count() == 0) ||
2134 (iter->position > 0))) {
2135 int to_move = (right->count() - iter->node->count()) / 2;
2136 to_move = std::min(to_move, right->count() - 1);
2137 iter->node->rebalance_right_to_left(right, to_move);
2138 return false;
2139 }
2140 }
2141 if (iter->node->position() > 0) {
2142 // Try rebalancing with our left sibling. We don't perform rebalancing if
2143 // we deleted the last element from iter->node and the node is not
2144 // empty. This is a small optimization for the common pattern of deleting
2145 // from the back of the tree.
2146 node_type *left = parent->child(iter->node->position() - 1);
2147 if ((left->count() > kMinNodeValues) &&
2148 ((iter->node->count() == 0) ||
2149 (iter->position < iter->node->count()))) {
2150 int to_move = (left->count() - iter->node->count()) / 2;
2151 to_move = std::min(to_move, left->count() - 1);
2152 left->rebalance_left_to_right(iter->node, to_move);
2153 iter->position += to_move;
2154 return false;
2155 }
2156 }
2157 return false;
2158 }
2159
2160 template <typename P>
2161 void btree<P>::try_shrink() {
2162 if (root()->count() > 0) {
2163 return;
2164 }
2165 // Deleted the last item on the root node, shrink the height of the tree.
2166 if (root()->leaf()) {
2167 ceph_assert(size() == 0);
2168 delete_leaf_node(root());
2169 *mutable_root() = NULL;
2170 } else {
2171 node_type *child = root()->child(0);
2172 if (child->leaf()) {
2173 // The child is a leaf node so simply make it the root node in the tree.
2174 child->make_root();
2175 delete_internal_root_node();
2176 *mutable_root() = child;
2177 } else {
2178 // The child is an internal node. We want to keep the existing root node
2179 // so we move all of the values from the child node into the existing
2180 // (empty) root node.
2181 child->swap(root());
2182 delete_internal_node(child);
2183 }
2184 }
2185 }
2186
2187 template <typename P> template <typename IterType>
2188 inline IterType btree<P>::internal_last(IterType iter) {
2189 while (iter.node && iter.position == iter.node->count()) {
2190 iter.position = iter.node->position();
2191 iter.node = iter.node->parent();
2192 if (iter.node->leaf()) {
2193 iter.node = NULL;
2194 }
2195 }
2196 return iter;
2197 }
2198
2199 template <typename P>
2200 inline typename btree<P>::iterator
2201 btree<P>::internal_insert(iterator iter, const value_type &v) {
2202 if (!iter.node->leaf()) {
2203 // We can't insert on an internal node. Instead, we'll insert after the
2204 // previous value which is guaranteed to be on a leaf node.
2205 --iter;
2206 ++iter.position;
2207 }
2208 if (iter.node->count() == iter.node->max_count()) {
2209 // Make room in the leaf for the new item.
2210 if (iter.node->max_count() < kNodeValues) {
1211 // Insertion into the root where the root is smaller than the full node
2212 // size. Simply grow the size of the root node.
2213 ceph_assert(iter.node == root());
2214 iter.node = new_leaf_root_node(
2215 std::min<int>(kNodeValues, 2 * iter.node->max_count()));
2216 iter.node->swap(root());
2217 delete_leaf_node(root());
2218 *mutable_root() = iter.node;
2219 } else {
2220 rebalance_or_split(&iter);
2221 ++*mutable_size();
2222 }
2223 } else if (!root()->leaf()) {
2224 ++*mutable_size();
2225 }
2226 iter.node->insert_value(iter.position, v);
2227 return iter;
2228 }
2229
2230 template <typename P> template <typename IterType>
2231 inline std::pair<IterType, int> btree<P>::internal_locate(
2232 const key_type &key, IterType iter) const {
2233 return internal_locate_type::dispatch(key, *this, iter);
2234 }
2235
2236 template <typename P> template <typename IterType>
2237 inline std::pair<IterType, int> btree<P>::internal_locate_plain_compare(
2238 const key_type &key, IterType iter) const {
2239 for (;;) {
2240 iter.position = iter.node->lower_bound(key, key_comp());
2241 if (iter.node->leaf()) {
2242 break;
2243 }
2244 iter.node = iter.node->child(iter.position);
2245 }
2246 return std::make_pair(iter, 0);
2247 }
2248
2249 template <typename P> template <typename IterType>
2250 inline std::pair<IterType, int> btree<P>::internal_locate_compare_to(
2251 const key_type &key, IterType iter) const {
2252 for (;;) {
2253 int res = iter.node->lower_bound(key, key_comp());
2254 iter.position = res & kMatchMask;
2255 if (res & kExactMatch) {
2256 return std::make_pair(iter, static_cast<int>(kExactMatch));
2257 }
2258 if (iter.node->leaf()) {
2259 break;
2260 }
2261 iter.node = iter.node->child(iter.position);
2262 }
2263 return std::make_pair(iter, -kExactMatch);
2264 }
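// Illustrative decoding (not part of the implementation): for compare-to
// functors, lower_bound() packs an exact-match flag into its result, so a
// result of, say, (2 | kExactMatch) means the key compared equal at slot 2.
// The caller recovers the slot with res & kMatchMask and tests
// res & kExactMatch for equality; internal_locate_compare_to() then reports
// +kExactMatch or -kExactMatch in .second, which lets callers such as
// insert_unique() distinguish "found" from "not found" without the extra
// comparison needed on the plain-compare path.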
2265
2266 template <typename P> template <typename IterType>
2267 IterType btree<P>::internal_lower_bound(
2268 const key_type &key, IterType iter) const {
2269 if (iter.node) {
2270 for (;;) {
2271 iter.position =
2272 iter.node->lower_bound(key, key_comp()) & kMatchMask;
2273 if (iter.node->leaf()) {
2274 break;
2275 }
2276 iter.node = iter.node->child(iter.position);
2277 }
2278 iter = internal_last(iter);
2279 }
2280 return iter;
2281 }
2282
2283 template <typename P> template <typename IterType>
2284 IterType btree<P>::internal_upper_bound(
2285 const key_type &key, IterType iter) const {
2286 if (iter.node) {
2287 for (;;) {
2288 iter.position = iter.node->upper_bound(key, key_comp());
2289 if (iter.node->leaf()) {
2290 break;
2291 }
2292 iter.node = iter.node->child(iter.position);
2293 }
2294 iter = internal_last(iter);
2295 }
2296 return iter;
2297 }
2298
2299 template <typename P> template <typename IterType>
2300 IterType btree<P>::internal_find_unique(
2301 const key_type &key, IterType iter) const {
2302 if (iter.node) {
2303 std::pair<IterType, int> res = internal_locate(key, iter);
2304 if (res.second == kExactMatch) {
2305 return res.first;
2306 }
2307 if (!res.second) {
2308 iter = internal_last(res.first);
2309 if (iter.node && !compare_keys(key, iter.key())) {
2310 return iter;
2311 }
2312 }
2313 }
2314 return IterType(NULL, 0);
2315 }
2316
2317 template <typename P> template <typename IterType>
2318 IterType btree<P>::internal_find_multi(
2319 const key_type &key, IterType iter) const {
2320 if (iter.node) {
2321 iter = internal_lower_bound(key, iter);
2322 if (iter.node) {
2323 iter = internal_last(iter);
2324 if (iter.node && !compare_keys(key, iter.key())) {
2325 return iter;
2326 }
2327 }
2328 }
2329 return IterType(NULL, 0);
2330 }
2331
2332 template <typename P>
2333 void btree<P>::internal_clear(node_type *node) {
2334 if (!node->leaf()) {
2335 for (int i = 0; i <= node->count(); ++i) {
2336 internal_clear(node->child(i));
2337 }
2338 if (node == root()) {
2339 delete_internal_root_node();
2340 } else {
2341 delete_internal_node(node);
2342 }
2343 } else {
2344 delete_leaf_node(node);
2345 }
2346 }
2347
2348 template <typename P>
2349 void btree<P>::internal_dump(
2350 std::ostream &os, const node_type *node, int level) const {
2351 for (int i = 0; i < node->count(); ++i) {
2352 if (!node->leaf()) {
2353 internal_dump(os, node->child(i), level + 1);
2354 }
2355 for (int j = 0; j < level; ++j) {
2356 os << " ";
2357 }
2358 os << node->key(i) << " [" << level << "]\n";
2359 }
2360 if (!node->leaf()) {
2361 internal_dump(os, node->child(node->count()), level + 1);
2362 }
2363 }
2364
2365 template <typename P>
2366 int btree<P>::internal_verify(
2367 const node_type *node, const key_type *lo, const key_type *hi) const {
2368 ceph_assert(node->count() > 0);
2369 ceph_assert(node->count() <= node->max_count());
2370 if (lo) {
2371 ceph_assert(!compare_keys(node->key(0), *lo));
2372 }
2373 if (hi) {
2374 ceph_assert(!compare_keys(*hi, node->key(node->count() - 1)));
2375 }
2376 for (int i = 1; i < node->count(); ++i) {
2377 ceph_assert(!compare_keys(node->key(i), node->key(i - 1)));
2378 }
2379 int count = node->count();
2380 if (!node->leaf()) {
2381 for (int i = 0; i <= node->count(); ++i) {
2382 ceph_assert(node->child(i) != NULL);
2383 ceph_assert(node->child(i)->parent() == node);
2384 ceph_assert(node->child(i)->position() == i);
2385 count += internal_verify(
2386 node->child(i),
2387 (i == 0) ? lo : &node->key(i - 1),
2388 (i == node->count()) ? hi : &node->key(i));
2389 }
2390 }
2391 return count;
2392 }
2393
2394 } // namespace btree
2395
2396 #endif // UTIL_BTREE_BTREE_H__
2397