Chameleon Svn Source Tree

Root/branches/cparm/i386/libsa/rb.h

/******************************************************************************
 *
 * Copyright (C) 2008 Jason Evans <jasone@FreeBSD.org>.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice(s), this list of conditions and the following disclaimer
 *    unmodified other than the allowable addition of one or more
 *    copyright notices.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice(s), this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
 * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 ******************************************************************************
 *
 * cpp macro implementation of left-leaning red-black trees.
 *
 * Usage:
 *
 *   (Optional.)
 *   #define SIZEOF_PTR ...
 *   #define SIZEOF_PTR_2POW ...
 *   #define RB_NO_C99_VARARRAYS
 *
 *   (Optional, see assert(3).)
 *   #define NDEBUG
 *
 *   (Required.)
 *   #include <assert.h>
 *   #include <rb.h>
 *   ...
 *
 * All operations are done non-recursively.  Parent pointers are not used, and
 * color bits are stored in the least significant bit of right-child pointers,
 * thus making node linkage as compact as is possible for red-black trees.
 *
 * Some macros use a comparison function pointer, which is expected to have the
 * following prototype:
 *
 *   int (a_cmp *)(a_type *a_node, a_type *a_other);
 *                         ^^^^^^
 *                      or a_key
 *
 * Interpretation of comparison function return values:
 *
 *   -1 : a_node <  a_other
 *    0 : a_node == a_other
 *    1 : a_node >  a_other
 *
 * In all cases, the a_node or a_key macro argument is the first argument to
 * the comparison function, which makes it possible to write comparison
 * functions that treat the first argument specially.
 *
 ******************************************************************************/
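
/*
 * Illustrative sketch of the expected comparison function, using a
 * hypothetical node type ex_node_t with an integer key and an embedded
 * linkage field named "link" (names chosen only for this example):
 *
 *   typedef struct ex_node_s ex_node_t;
 *   struct ex_node_s {
 *       int key;
 *       rb_node(ex_node_t) link;
 *   };
 *
 *   static int
 *   ex_cmp(ex_node_t *a, ex_node_t *b)
 *   {
 *       return ((a->key > b->key) - (a->key < b->key));
 *   }
 */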

#ifndef RB_H_
#define RB_H_

#if 0
#include <sys/cdefs.h>
__FBSDID("$FreeBSD: head/lib/libc/stdlib/rb.h 178995 2008-05-14 18:33:13Z jasone $");
#endif

/* Node structure. */
#define rb_node(a_type) \
struct { \
    a_type *rbn_left; \
    a_type *rbn_right_red; \
}

/* Root structure. */
#define rb_tree(a_type) \
struct { \
    a_type *rbt_root; \
    a_type rbt_nil; \
}

/* Left accessors. */
#define rbp_left_get(a_type, a_field, a_node) \
    ((a_node)->a_field.rbn_left)
#define rbp_left_set(a_type, a_field, a_node, a_left) do { \
    (a_node)->a_field.rbn_left = a_left; \
} while (0)

/* Right accessors. */
#define rbp_right_get(a_type, a_field, a_node) \
    ((a_type *) (((intptr_t) (a_node)->a_field.rbn_right_red) \
      & ((ssize_t)-2)))
#define rbp_right_set(a_type, a_field, a_node, a_right) do { \
    (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t) a_right) \
      | (((uintptr_t) (a_node)->a_field.rbn_right_red) & ((size_t)1))); \
} while (0)

/* Color accessors. */
#define rbp_red_get(a_type, a_field, a_node) \
    ((bool) (((uintptr_t) (a_node)->a_field.rbn_right_red) \
      & ((size_t)1)))
#define rbp_color_set(a_type, a_field, a_node, a_red) do { \
    (a_node)->a_field.rbn_right_red = (a_type *) ((((intptr_t) \
      (a_node)->a_field.rbn_right_red) & ((ssize_t)-2)) \
      | ((ssize_t)a_red)); \
} while (0)
#define rbp_red_set(a_type, a_field, a_node) do { \
    (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t) \
      (a_node)->a_field.rbn_right_red) | ((size_t)1)); \
} while (0)
#define rbp_black_set(a_type, a_field, a_node) do { \
    (a_node)->a_field.rbn_right_red = (a_type *) (((intptr_t) \
      (a_node)->a_field.rbn_right_red) & ((ssize_t)-2)); \
} while (0)

/* Node initializer. */
#define rbp_node_new(a_type, a_field, a_tree, a_node) do { \
    rbp_left_set(a_type, a_field, (a_node), &(a_tree)->rbt_nil); \
    rbp_right_set(a_type, a_field, (a_node), &(a_tree)->rbt_nil); \
    rbp_red_set(a_type, a_field, (a_node)); \
} while (0)

/* Tree initializer. */
#define rb_new(a_type, a_field, a_tree) do { \
    (a_tree)->rbt_root = &(a_tree)->rbt_nil; \
    rbp_node_new(a_type, a_field, a_tree, &(a_tree)->rbt_nil); \
    rbp_black_set(a_type, a_field, &(a_tree)->rbt_nil); \
} while (0)
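
/*
 * Illustrative sketch (hypothetical ex_node_t type with a "link" linkage
 * field, as in the comparison-function example above): declaring a tree
 * type and initializing an empty tree might look like:
 *
 *   typedef rb_tree(ex_node_t) ex_tree_t;
 *
 *   ex_tree_t tree;
 *   rb_new(ex_node_t, link, &tree);   // tree now contains no nodes
 */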

/* Tree operations. */
#define rbp_black_height(a_type, a_field, a_tree, r_height) do { \
    a_type *rbp_bh_t; \
    for (rbp_bh_t = (a_tree)->rbt_root, (r_height) = 0; \
      rbp_bh_t != &(a_tree)->rbt_nil; \
      rbp_bh_t = rbp_left_get(a_type, a_field, rbp_bh_t)) { \
        if (rbp_red_get(a_type, a_field, rbp_bh_t) == false) { \
            (r_height)++; \
        } \
    } \
} while (0)

#define rbp_first(a_type, a_field, a_tree, a_root, r_node) do { \
    for ((r_node) = (a_root); \
      rbp_left_get(a_type, a_field, (r_node)) != &(a_tree)->rbt_nil; \
      (r_node) = rbp_left_get(a_type, a_field, (r_node))) { \
    } \
} while (0)

#define rbp_last(a_type, a_field, a_tree, a_root, r_node) do { \
    for ((r_node) = (a_root); \
      rbp_right_get(a_type, a_field, (r_node)) != &(a_tree)->rbt_nil; \
      (r_node) = rbp_right_get(a_type, a_field, (r_node))) { \
    } \
} while (0)

#define rbp_next(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
    if (rbp_right_get(a_type, a_field, (a_node)) \
      != &(a_tree)->rbt_nil) { \
        rbp_first(a_type, a_field, a_tree, rbp_right_get(a_type, \
          a_field, (a_node)), (r_node)); \
    } else { \
        a_type *rbp_n_t = (a_tree)->rbt_root; \
        assert(rbp_n_t != &(a_tree)->rbt_nil); \
        (r_node) = &(a_tree)->rbt_nil; \
        while (true) { \
            int rbp_n_cmp = (a_cmp)((a_node), rbp_n_t); \
            if (rbp_n_cmp < 0) { \
                (r_node) = rbp_n_t; \
                rbp_n_t = rbp_left_get(a_type, a_field, rbp_n_t); \
            } else if (rbp_n_cmp > 0) { \
                rbp_n_t = rbp_right_get(a_type, a_field, rbp_n_t); \
            } else { \
                break; \
            } \
            assert(rbp_n_t != &(a_tree)->rbt_nil); \
        } \
    } \
} while (0)

#define rbp_prev(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
    if (rbp_left_get(a_type, a_field, (a_node)) != &(a_tree)->rbt_nil) { \
        rbp_last(a_type, a_field, a_tree, rbp_left_get(a_type, \
          a_field, (a_node)), (r_node)); \
    } else { \
        a_type *rbp_p_t = (a_tree)->rbt_root; \
        assert(rbp_p_t != &(a_tree)->rbt_nil); \
        (r_node) = &(a_tree)->rbt_nil; \
        while (true) { \
            int rbp_p_cmp = (a_cmp)((a_node), rbp_p_t); \
            if (rbp_p_cmp < 0) { \
                rbp_p_t = rbp_left_get(a_type, a_field, rbp_p_t); \
            } else if (rbp_p_cmp > 0) { \
                (r_node) = rbp_p_t; \
                rbp_p_t = rbp_right_get(a_type, a_field, rbp_p_t); \
            } else { \
                break; \
            } \
            assert(rbp_p_t != &(a_tree)->rbt_nil); \
        } \
    } \
} while (0)

#define rb_first(a_type, a_field, a_tree, r_node) do { \
    rbp_first(a_type, a_field, a_tree, (a_tree)->rbt_root, (r_node)); \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)

#define rb_last(a_type, a_field, a_tree, r_node) do { \
    rbp_last(a_type, a_field, a_tree, (a_tree)->rbt_root, r_node); \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)

#define rb_next(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
    rbp_next(a_type, a_field, a_cmp, a_tree, (a_node), (r_node)); \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)

#define rb_prev(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
    rbp_prev(a_type, a_field, a_cmp, a_tree, (a_node), (r_node)); \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)

#define rb_search(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \
    int rbp_se_cmp; \
    (r_node) = (a_tree)->rbt_root; \
    while ((r_node) != &(a_tree)->rbt_nil \
      && (rbp_se_cmp = (a_cmp)((a_key), (r_node))) != 0) { \
        if (rbp_se_cmp < 0) { \
            (r_node) = rbp_left_get(a_type, a_field, (r_node)); \
        } else { \
            (r_node) = rbp_right_get(a_type, a_field, (r_node)); \
        } \
    } \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)

/*
 * Find a match if it exists.  Otherwise, find the next greater node, if one
 * exists.
 */
#define rb_nsearch(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \
    a_type *rbp_ns_t = (a_tree)->rbt_root; \
    (r_node) = NULL; \
    while (rbp_ns_t != &(a_tree)->rbt_nil) { \
        int rbp_ns_cmp = (a_cmp)((a_key), rbp_ns_t); \
        if (rbp_ns_cmp < 0) { \
            (r_node) = rbp_ns_t; \
            rbp_ns_t = rbp_left_get(a_type, a_field, rbp_ns_t); \
        } else if (rbp_ns_cmp > 0) { \
            rbp_ns_t = rbp_right_get(a_type, a_field, rbp_ns_t); \
        } else { \
            (r_node) = rbp_ns_t; \
            break; \
        } \
    } \
} while (0)

/*
 * Find a match if it exists.  Otherwise, find the previous lesser node, if one
 * exists.
 */
#define rb_psearch(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \
    a_type *rbp_ps_t = (a_tree)->rbt_root; \
    (r_node) = NULL; \
    while (rbp_ps_t != &(a_tree)->rbt_nil) { \
        int rbp_ps_cmp = (a_cmp)((a_key), rbp_ps_t); \
        if (rbp_ps_cmp < 0) { \
            rbp_ps_t = rbp_left_get(a_type, a_field, rbp_ps_t); \
        } else if (rbp_ps_cmp > 0) { \
            (r_node) = rbp_ps_t; \
            rbp_ps_t = rbp_right_get(a_type, a_field, rbp_ps_t); \
        } else { \
            (r_node) = rbp_ps_t; \
            break; \
        } \
    } \
} while (0)
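
/*
 * Semantics sketch (hypothetical ex_node_t / link / ex_cmp names as above):
 * for a tree holding keys {10, 20, 30} and a key node with key == 25,
 *
 *   rb_search(ex_node_t, link, ex_cmp, &tree, &key, res);   // res == NULL
 *   rb_nsearch(ex_node_t, link, ex_cmp, &tree, &key, res);  // res -> key 30
 *   rb_psearch(ex_node_t, link, ex_cmp, &tree, &key, res);  // res -> key 20
 */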

#define rbp_rotate_left(a_type, a_field, a_node, r_node) do { \
    (r_node) = rbp_right_get(a_type, a_field, (a_node)); \
    rbp_right_set(a_type, a_field, (a_node), \
      rbp_left_get(a_type, a_field, (r_node))); \
    rbp_left_set(a_type, a_field, (r_node), (a_node)); \
} while (0)

#define rbp_rotate_right(a_type, a_field, a_node, r_node) do { \
    (r_node) = rbp_left_get(a_type, a_field, (a_node)); \
    rbp_left_set(a_type, a_field, (a_node), \
      rbp_right_get(a_type, a_field, (r_node))); \
    rbp_right_set(a_type, a_field, (r_node), (a_node)); \
} while (0)

#define rbp_lean_left(a_type, a_field, a_node, r_node) do { \
    bool rbp_ll_red; \
    rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
    rbp_ll_red = rbp_red_get(a_type, a_field, (a_node)); \
    rbp_color_set(a_type, a_field, (r_node), rbp_ll_red); \
    rbp_red_set(a_type, a_field, (a_node)); \
} while (0)

#define rbp_lean_right(a_type, a_field, a_node, r_node) do { \
    bool rbp_lr_red; \
    rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
    rbp_lr_red = rbp_red_get(a_type, a_field, (a_node)); \
    rbp_color_set(a_type, a_field, (r_node), rbp_lr_red); \
    rbp_red_set(a_type, a_field, (a_node)); \
} while (0)

#define rbp_move_red_left(a_type, a_field, a_node, r_node) do { \
    a_type *rbp_mrl_t, *rbp_mrl_u; \
    rbp_mrl_t = rbp_left_get(a_type, a_field, (a_node)); \
    rbp_red_set(a_type, a_field, rbp_mrl_t); \
    rbp_mrl_t = rbp_right_get(a_type, a_field, (a_node)); \
    rbp_mrl_u = rbp_left_get(a_type, a_field, rbp_mrl_t); \
    if (rbp_red_get(a_type, a_field, rbp_mrl_u)) { \
        rbp_rotate_right(a_type, a_field, rbp_mrl_t, rbp_mrl_u); \
        rbp_right_set(a_type, a_field, (a_node), rbp_mrl_u); \
        rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
        rbp_mrl_t = rbp_right_get(a_type, a_field, (a_node)); \
        if (rbp_red_get(a_type, a_field, rbp_mrl_t)) { \
            rbp_black_set(a_type, a_field, rbp_mrl_t); \
            rbp_red_set(a_type, a_field, (a_node)); \
            rbp_rotate_left(a_type, a_field, (a_node), rbp_mrl_t); \
            rbp_left_set(a_type, a_field, (r_node), rbp_mrl_t); \
        } else { \
            rbp_black_set(a_type, a_field, (a_node)); \
        } \
    } else { \
        rbp_red_set(a_type, a_field, (a_node)); \
        rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
    } \
} while (0)

#define rbp_move_red_right(a_type, a_field, a_node, r_node) do { \
    a_type *rbp_mrr_t; \
    rbp_mrr_t = rbp_left_get(a_type, a_field, (a_node)); \
    if (rbp_red_get(a_type, a_field, rbp_mrr_t)) { \
        a_type *rbp_mrr_u, *rbp_mrr_v; \
        rbp_mrr_u = rbp_right_get(a_type, a_field, rbp_mrr_t); \
        rbp_mrr_v = rbp_left_get(a_type, a_field, rbp_mrr_u); \
        if (rbp_red_get(a_type, a_field, rbp_mrr_v)) { \
            rbp_color_set(a_type, a_field, rbp_mrr_u, \
              rbp_red_get(a_type, a_field, (a_node))); \
            rbp_black_set(a_type, a_field, rbp_mrr_v); \
            rbp_rotate_left(a_type, a_field, rbp_mrr_t, rbp_mrr_u); \
            rbp_left_set(a_type, a_field, (a_node), rbp_mrr_u); \
            rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
            rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \
            rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \
        } else { \
            rbp_color_set(a_type, a_field, rbp_mrr_t, \
              rbp_red_get(a_type, a_field, (a_node))); \
            rbp_red_set(a_type, a_field, rbp_mrr_u); \
            rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
            rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \
            rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \
        } \
        rbp_red_set(a_type, a_field, (a_node)); \
    } else { \
        rbp_red_set(a_type, a_field, rbp_mrr_t); \
        rbp_mrr_t = rbp_left_get(a_type, a_field, rbp_mrr_t); \
        if (rbp_red_get(a_type, a_field, rbp_mrr_t)) { \
            rbp_black_set(a_type, a_field, rbp_mrr_t); \
            rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
            rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \
            rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \
        } else { \
            rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
        } \
    } \
} while (0)

#define rb_insert(a_type, a_field, a_cmp, a_tree, a_node) do { \
    a_type rbp_i_s; \
    a_type *rbp_i_g, *rbp_i_p, *rbp_i_c, *rbp_i_t, *rbp_i_u; \
    int rbp_i_cmp = 0; \
    rbp_i_g = &(a_tree)->rbt_nil; \
    rbp_left_set(a_type, a_field, &rbp_i_s, (a_tree)->rbt_root); \
    rbp_right_set(a_type, a_field, &rbp_i_s, &(a_tree)->rbt_nil); \
    rbp_black_set(a_type, a_field, &rbp_i_s); \
    rbp_i_p = &rbp_i_s; \
    rbp_i_c = (a_tree)->rbt_root; \
    /* Iteratively search down the tree for the insertion point, */ \
    /* splitting 4-nodes as they are encountered.  At the end of each */ \
    /* iteration, rbp_i_g->rbp_i_p->rbp_i_c is a 3-level path down */ \
    /* the tree, assuming a sufficiently deep tree. */ \
    while (rbp_i_c != &(a_tree)->rbt_nil) { \
        rbp_i_t = rbp_left_get(a_type, a_field, rbp_i_c); \
        rbp_i_u = rbp_left_get(a_type, a_field, rbp_i_t); \
        if (rbp_red_get(a_type, a_field, rbp_i_t) \
          && rbp_red_get(a_type, a_field, rbp_i_u)) { \
            /* rbp_i_c is the top of a logical 4-node, so split it. */ \
            /* This iteration does not move down the tree, due to the */ \
            /* disruptiveness of node splitting. */ \
            /* */ \
            /* Rotate right. */ \
            rbp_rotate_right(a_type, a_field, rbp_i_c, rbp_i_t); \
            /* Pass red links up one level. */ \
            rbp_i_u = rbp_left_get(a_type, a_field, rbp_i_t); \
            rbp_black_set(a_type, a_field, rbp_i_u); \
            if (rbp_left_get(a_type, a_field, rbp_i_p) == rbp_i_c) { \
                rbp_left_set(a_type, a_field, rbp_i_p, rbp_i_t); \
                rbp_i_c = rbp_i_t; \
            } else { \
                /* rbp_i_c was the right child of rbp_i_p, so rotate */ \
                /* left in order to maintain the left-leaning */ \
                /* invariant. */ \
                assert(rbp_right_get(a_type, a_field, rbp_i_p) \
                  == rbp_i_c); \
                rbp_right_set(a_type, a_field, rbp_i_p, rbp_i_t); \
                rbp_lean_left(a_type, a_field, rbp_i_p, rbp_i_u); \
                if (rbp_left_get(a_type, a_field, rbp_i_g) == rbp_i_p) { \
                    rbp_left_set(a_type, a_field, rbp_i_g, rbp_i_u); \
                } else { \
                    assert(rbp_right_get(a_type, a_field, rbp_i_g) \
                      == rbp_i_p); \
                    rbp_right_set(a_type, a_field, rbp_i_g, rbp_i_u); \
                } \
                rbp_i_p = rbp_i_u; \
                rbp_i_cmp = (a_cmp)((a_node), rbp_i_p); \
                if (rbp_i_cmp < 0) { \
                    rbp_i_c = rbp_left_get(a_type, a_field, rbp_i_p); \
                } else { \
                    assert(rbp_i_cmp > 0); \
                    rbp_i_c = rbp_right_get(a_type, a_field, rbp_i_p); \
                } \
                continue; \
            } \
        } \
        rbp_i_g = rbp_i_p; \
        rbp_i_p = rbp_i_c; \
        rbp_i_cmp = (a_cmp)((a_node), rbp_i_c); \
        if (rbp_i_cmp < 0) { \
            rbp_i_c = rbp_left_get(a_type, a_field, rbp_i_c); \
        } else { \
            assert(rbp_i_cmp > 0); \
            rbp_i_c = rbp_right_get(a_type, a_field, rbp_i_c); \
        } \
    } \
    /* rbp_i_p now refers to the node under which to insert. */ \
    rbp_node_new(a_type, a_field, a_tree, (a_node)); \
    if (rbp_i_cmp > 0) { \
        rbp_right_set(a_type, a_field, rbp_i_p, (a_node)); \
        rbp_lean_left(a_type, a_field, rbp_i_p, rbp_i_t); \
        if (rbp_left_get(a_type, a_field, rbp_i_g) == rbp_i_p) { \
            rbp_left_set(a_type, a_field, rbp_i_g, rbp_i_t); \
        } else if (rbp_right_get(a_type, a_field, rbp_i_g) == rbp_i_p) { \
            rbp_right_set(a_type, a_field, rbp_i_g, rbp_i_t); \
        } \
    } else { \
        rbp_left_set(a_type, a_field, rbp_i_p, (a_node)); \
    } \
    /* Update the root and make sure that it is black. */ \
    (a_tree)->rbt_root = rbp_left_get(a_type, a_field, &rbp_i_s); \
    rbp_black_set(a_type, a_field, (a_tree)->rbt_root); \
} while (0)

#define rb_remove(a_type, a_field, a_cmp, a_tree, a_node) do { \
    a_type rbp_r_s; \
    a_type *rbp_r_p, *rbp_r_c, *rbp_r_xp, *rbp_r_t, *rbp_r_u; \
    int rbp_r_cmp; \
    rbp_left_set(a_type, a_field, &rbp_r_s, (a_tree)->rbt_root); \
    rbp_right_set(a_type, a_field, &rbp_r_s, &(a_tree)->rbt_nil); \
    rbp_black_set(a_type, a_field, &rbp_r_s); \
    rbp_r_p = &rbp_r_s; \
    rbp_r_c = (a_tree)->rbt_root; \
    rbp_r_xp = &(a_tree)->rbt_nil; \
    /* Iterate down the tree, but always transform 2-nodes to 3- or */ \
    /* 4-nodes in order to maintain the invariant that the current */ \
    /* node is not a 2-node.  This allows simple deletion once a leaf */ \
    /* is reached.  Handle the root specially though, since there may */ \
    /* be no way to convert it from a 2-node to a 3-node. */ \
    rbp_r_cmp = (a_cmp)((a_node), rbp_r_c); \
    if (rbp_r_cmp < 0) { \
        rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \
        rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
        if (rbp_red_get(a_type, a_field, rbp_r_t) == false \
          && rbp_red_get(a_type, a_field, rbp_r_u) == false) { \
            /* Apply standard transform to prepare for left move. */ \
            rbp_move_red_left(a_type, a_field, rbp_r_c, rbp_r_t); \
            rbp_black_set(a_type, a_field, rbp_r_t); \
            rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
            rbp_r_c = rbp_r_t; \
        } else { \
            /* Move left. */ \
            rbp_r_p = rbp_r_c; \
            rbp_r_c = rbp_left_get(a_type, a_field, rbp_r_c); \
        } \
    } else { \
        if (rbp_r_cmp == 0) { \
            assert((a_node) == rbp_r_c); \
            if (rbp_right_get(a_type, a_field, rbp_r_c) \
              == &(a_tree)->rbt_nil) { \
                /* Delete root node (which is also a leaf node). */ \
                if (rbp_left_get(a_type, a_field, rbp_r_c) \
                  != &(a_tree)->rbt_nil) { \
                    rbp_lean_right(a_type, a_field, rbp_r_c, rbp_r_t); \
                    rbp_right_set(a_type, a_field, rbp_r_t, \
                      &(a_tree)->rbt_nil); \
                } else { \
                    rbp_r_t = &(a_tree)->rbt_nil; \
                } \
                rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
            } else { \
                /* This is the node we want to delete, but we will */ \
                /* instead swap it with its successor and delete the */ \
                /* successor.  Record enough information to do the */ \
                /* swap later.  rbp_r_xp is the a_node's parent. */ \
                rbp_r_xp = rbp_r_p; \
                rbp_r_cmp = 1; /* Note that deletion is incomplete. */ \
            } \
        } \
        if (rbp_r_cmp == 1) { \
            if (rbp_red_get(a_type, a_field, rbp_left_get(a_type, \
              a_field, rbp_right_get(a_type, a_field, rbp_r_c))) \
              == false) { \
                rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \
                if (rbp_red_get(a_type, a_field, rbp_r_t)) { \
                    /* Standard transform. */ \
                    rbp_move_red_right(a_type, a_field, rbp_r_c, \
                      rbp_r_t); \
                } else { \
                    /* Root-specific transform. */ \
                    rbp_red_set(a_type, a_field, rbp_r_c); \
                    rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
                    if (rbp_red_get(a_type, a_field, rbp_r_u)) { \
                        rbp_black_set(a_type, a_field, rbp_r_u); \
                        rbp_rotate_right(a_type, a_field, rbp_r_c, \
                          rbp_r_t); \
                        rbp_rotate_left(a_type, a_field, rbp_r_c, \
                          rbp_r_u); \
                        rbp_right_set(a_type, a_field, rbp_r_t, \
                          rbp_r_u); \
                    } else { \
                        rbp_red_set(a_type, a_field, rbp_r_t); \
                        rbp_rotate_left(a_type, a_field, rbp_r_c, \
                          rbp_r_t); \
                    } \
                } \
                rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
                rbp_r_c = rbp_r_t; \
            } else { \
                /* Move right. */ \
                rbp_r_p = rbp_r_c; \
                rbp_r_c = rbp_right_get(a_type, a_field, rbp_r_c); \
            } \
        } \
    } \
    if (rbp_r_cmp != 0) { \
        while (true) { \
            assert(rbp_r_p != &(a_tree)->rbt_nil); \
            rbp_r_cmp = (a_cmp)((a_node), rbp_r_c); \
            if (rbp_r_cmp < 0) { \
                rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \
                if (rbp_r_t == &(a_tree)->rbt_nil) { \
                    /* rbp_r_c now refers to the successor node to */ \
                    /* relocate, and rbp_r_xp/a_node refer to the */ \
                    /* context for the relocation. */ \
                    if (rbp_left_get(a_type, a_field, rbp_r_xp) \
                      == (a_node)) { \
                        rbp_left_set(a_type, a_field, rbp_r_xp, \
                          rbp_r_c); \
                    } else { \
                        assert(rbp_right_get(a_type, a_field, \
                          rbp_r_xp) == (a_node)); \
                        rbp_right_set(a_type, a_field, rbp_r_xp, \
                          rbp_r_c); \
                    } \
                    rbp_left_set(a_type, a_field, rbp_r_c, \
                      rbp_left_get(a_type, a_field, (a_node))); \
                    rbp_right_set(a_type, a_field, rbp_r_c, \
                      rbp_right_get(a_type, a_field, (a_node))); \
                    rbp_color_set(a_type, a_field, rbp_r_c, \
                      rbp_red_get(a_type, a_field, (a_node))); \
                    if (rbp_left_get(a_type, a_field, rbp_r_p) \
                      == rbp_r_c) { \
                        rbp_left_set(a_type, a_field, rbp_r_p, \
                          &(a_tree)->rbt_nil); \
                    } else { \
                        assert(rbp_right_get(a_type, a_field, rbp_r_p) \
                          == rbp_r_c); \
                        rbp_right_set(a_type, a_field, rbp_r_p, \
                          &(a_tree)->rbt_nil); \
                    } \
                    break; \
                } \
                rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
                if (rbp_red_get(a_type, a_field, rbp_r_t) == false \
                  && rbp_red_get(a_type, a_field, rbp_r_u) == false) { \
                    rbp_move_red_left(a_type, a_field, rbp_r_c, \
                      rbp_r_t); \
                    if (rbp_left_get(a_type, a_field, rbp_r_p) \
                      == rbp_r_c) { \
                        rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
                    } else { \
                        rbp_right_set(a_type, a_field, rbp_r_p, \
                          rbp_r_t); \
                    } \
                    rbp_r_c = rbp_r_t; \
                } else { \
                    rbp_r_p = rbp_r_c; \
                    rbp_r_c = rbp_left_get(a_type, a_field, rbp_r_c); \
                } \
            } else { \
                /* Check whether to delete this node (it has to be */ \
                /* the correct node and a leaf node). */ \
                if (rbp_r_cmp == 0) { \
                    assert((a_node) == rbp_r_c); \
                    if (rbp_right_get(a_type, a_field, rbp_r_c) \
                      == &(a_tree)->rbt_nil) { \
                        /* Delete leaf node. */ \
                        if (rbp_left_get(a_type, a_field, rbp_r_c) \
                          != &(a_tree)->rbt_nil) { \
                            rbp_lean_right(a_type, a_field, rbp_r_c, \
                              rbp_r_t); \
                            rbp_right_set(a_type, a_field, rbp_r_t, \
                              &(a_tree)->rbt_nil); \
                        } else { \
                            rbp_r_t = &(a_tree)->rbt_nil; \
                        } \
                        if (rbp_left_get(a_type, a_field, rbp_r_p) \
                          == rbp_r_c) { \
                            rbp_left_set(a_type, a_field, rbp_r_p, \
                              rbp_r_t); \
                        } else { \
                            rbp_right_set(a_type, a_field, rbp_r_p, \
                              rbp_r_t); \
                        } \
                        break; \
                    } else { \
                        /* This is the node we want to delete, but we */ \
                        /* will instead swap it with its successor */ \
                        /* and delete the successor.  Record enough */ \
                        /* information to do the swap later. */ \
                        /* rbp_r_xp is a_node's parent. */ \
                        rbp_r_xp = rbp_r_p; \
                    } \
                } \
                rbp_r_t = rbp_right_get(a_type, a_field, rbp_r_c); \
                rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
                if (rbp_red_get(a_type, a_field, rbp_r_u) == false) { \
                    rbp_move_red_right(a_type, a_field, rbp_r_c, \
                      rbp_r_t); \
                    if (rbp_left_get(a_type, a_field, rbp_r_p) \
                      == rbp_r_c) { \
                        rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
                    } else { \
                        rbp_right_set(a_type, a_field, rbp_r_p, \
                          rbp_r_t); \
                    } \
                    rbp_r_c = rbp_r_t; \
                } else { \
                    rbp_r_p = rbp_r_c; \
                    rbp_r_c = rbp_right_get(a_type, a_field, rbp_r_c); \
                } \
            } \
        } \
    } \
    /* Update root. */ \
    (a_tree)->rbt_root = rbp_left_get(a_type, a_field, &rbp_r_s); \
} while (0)

/*
 * The rb_wrap() macro provides a convenient way to wrap functions around the
 * cpp macros.  The main benefits of wrapping are that 1) repeated macro
 * expansion can cause code bloat, especially for rb_{insert,remove}(), and
 * 2) type, linkage, comparison functions, etc. need not be specified at every
 * call point.
 */

#define rb_wrap(a_attr, a_prefix, a_tree_type, a_type, a_field, a_cmp) \
a_attr void \
a_prefix##new(a_tree_type *tree) { \
    rb_new(a_type, a_field, tree); \
} \
a_attr a_type * \
a_prefix##first(a_tree_type *tree) { \
    a_type *ret; \
    rb_first(a_type, a_field, tree, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##last(a_tree_type *tree) { \
    a_type *ret; \
    rb_last(a_type, a_field, tree, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##next(a_tree_type *tree, a_type *node) { \
    a_type *ret; \
    rb_next(a_type, a_field, a_cmp, tree, node, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##prev(a_tree_type *tree, a_type *node) { \
    a_type *ret; \
    rb_prev(a_type, a_field, a_cmp, tree, node, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##search(a_tree_type *tree, a_type *key) { \
    a_type *ret; \
    rb_search(a_type, a_field, a_cmp, tree, key, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##nsearch(a_tree_type *tree, a_type *key) { \
    a_type *ret; \
    rb_nsearch(a_type, a_field, a_cmp, tree, key, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##psearch(a_tree_type *tree, a_type *key) { \
    a_type *ret; \
    rb_psearch(a_type, a_field, a_cmp, tree, key, ret); \
    return (ret); \
} \
a_attr void \
a_prefix##insert(a_tree_type *tree, a_type *node) { \
    rb_insert(a_type, a_field, a_cmp, tree, node); \
} \
a_attr void \
a_prefix##remove(a_tree_type *tree, a_type *node) { \
    rb_remove(a_type, a_field, a_cmp, tree, node); \
}
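
/*
 * Usage sketch for rb_wrap(), assuming the hypothetical ex_node_t, ex_cmp,
 * and ex_tree_t definitions sketched earlier:
 *
 *   rb_wrap(static, ex_tree_, ex_tree_t, ex_node_t, link, ex_cmp)
 *
 *   // Generates static ex_tree_new(), ex_tree_insert(), ex_tree_search(),
 *   // ex_tree_remove(), etc.:
 *   ex_tree_t t;
 *   ex_node_t n = { 42 };
 *   ex_tree_new(&t);
 *   ex_tree_insert(&t, &n);
 *   ex_node_t *found = ex_tree_search(&t, &n);   // found == &n
 *   ex_tree_remove(&t, &n);
 */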

/*
 * The iterators simulate recursion via an array of pointers that store the
 * current path.  This is critical to performance, since a series of calls to
 * rb_{next,prev}() would require time proportional to (n lg n), whereas this
 * implementation only requires time proportional to (n).
 *
 * Since the iterators cache a path down the tree, any tree modification may
 * cause the cached path to become invalid.  In order to continue iteration,
 * use something like the following sequence:
 *
 *   {
 *       a_type *node, *tnode;
 *
 *       rb_foreach_begin(a_type, a_field, a_tree, node) {
 *           ...
 *           rb_next(a_type, a_field, a_cmp, a_tree, node, tnode);
 *           rb_remove(a_type, a_field, a_cmp, a_tree, node);
 *           rb_foreach_next(a_type, a_field, a_cmp, a_tree, tnode);
 *           ...
 *       } rb_foreach_end(a_type, a_field, a_tree, node)
 *   }
 *
 * Note that this idiom is not advised if every iteration modifies the tree,
 * since in that case there is no algorithmic complexity improvement over a
 * series of rb_{next,prev}() calls, thus making the setup overhead wasted
 * effort.
 */

#ifdef RB_NO_C99_VARARRAYS
/*
 * Avoid using variable-length arrays, at the cost of using more stack space.
 * Size the path arrays such that they are always large enough, even if a
 * tree consumes all of memory.  Since each node must contain a minimum of
 * two pointers, there can never be more nodes than:
 *
 *   1 << ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1))
 *
 * Since the depth of a tree is limited to 3*lg(#nodes), the maximum depth
 * is:
 *
 *   (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)))
 *
 * This works out to a maximum depth of 87 and 180 for 32- and 64-bit
 * systems, respectively (approximately 348 and 1440 bytes, respectively).
 */
# define rbp_compute_f_height(a_type, a_field, a_tree)
# define rbp_f_height (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)))
# define rbp_compute_fr_height(a_type, a_field, a_tree)
# define rbp_fr_height (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)))
#else
# define rbp_compute_f_height(a_type, a_field, a_tree) \
    /* Compute the maximum possible tree depth (3X the black height). */ \
    unsigned rbp_f_height; \
    rbp_black_height(a_type, a_field, a_tree, rbp_f_height); \
    rbp_f_height *= 3;
# define rbp_compute_fr_height(a_type, a_field, a_tree) \
    /* Compute the maximum possible tree depth (3X the black height). */ \
    unsigned rbp_fr_height; \
    rbp_black_height(a_type, a_field, a_tree, rbp_fr_height); \
    rbp_fr_height *= 3;
#endif

#define rb_foreach_begin(a_type, a_field, a_tree, a_var) { \
    rbp_compute_f_height(a_type, a_field, a_tree) \
    { \
        /* Initialize the path to contain the left spine. */ \
        a_type *rbp_f_path[rbp_f_height]; \
        a_type *rbp_f_node; \
        bool rbp_f_synced = false; \
        unsigned rbp_f_depth = 0; \
        if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
            rbp_f_path[rbp_f_depth] = (a_tree)->rbt_root; \
            rbp_f_depth++; \
            while ((rbp_f_node = rbp_left_get(a_type, a_field, \
              rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
                rbp_f_path[rbp_f_depth] = rbp_f_node; \
                rbp_f_depth++; \
            } \
        } \
        /* While the path is non-empty, iterate. */ \
        while (rbp_f_depth > 0) { \
            (a_var) = rbp_f_path[rbp_f_depth-1];

/* Only use if modifying the tree during iteration. */
#define rb_foreach_next(a_type, a_field, a_cmp, a_tree, a_node) \
            /* Re-initialize the path to contain the path to a_node. */ \
            rbp_f_depth = 0; \
            if (a_node != NULL) { \
                if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
                    rbp_f_path[rbp_f_depth] = (a_tree)->rbt_root; \
                    rbp_f_depth++; \
                    rbp_f_node = rbp_f_path[0]; \
                    while (true) { \
                        int rbp_f_cmp = (a_cmp)((a_node), \
                          rbp_f_path[rbp_f_depth-1]); \
                        if (rbp_f_cmp < 0) { \
                            rbp_f_node = rbp_left_get(a_type, a_field, \
                              rbp_f_path[rbp_f_depth-1]); \
                        } else if (rbp_f_cmp > 0) { \
                            rbp_f_node = rbp_right_get(a_type, a_field, \
                              rbp_f_path[rbp_f_depth-1]); \
                        } else { \
                            break; \
                        } \
                        assert(rbp_f_node != &(a_tree)->rbt_nil); \
                        rbp_f_path[rbp_f_depth] = rbp_f_node; \
                        rbp_f_depth++; \
                    } \
                } \
            } \
            rbp_f_synced = true;

#define rb_foreach_end(a_type, a_field, a_tree, a_var) \
            if (rbp_f_synced) { \
                rbp_f_synced = false; \
                continue; \
            } \
            /* Find the successor. */ \
            if ((rbp_f_node = rbp_right_get(a_type, a_field, \
              rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
                /* The successor is the left-most node in the right */ \
                /* subtree. */ \
                rbp_f_path[rbp_f_depth] = rbp_f_node; \
                rbp_f_depth++; \
                while ((rbp_f_node = rbp_left_get(a_type, a_field, \
                  rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
                    rbp_f_path[rbp_f_depth] = rbp_f_node; \
                    rbp_f_depth++; \
                } \
            } else { \
                /* The successor is above the current node.  Unwind */ \
                /* until a left-leaning edge is removed from the */ \
                /* path, or the path is empty. */ \
                for (rbp_f_depth--; rbp_f_depth > 0; rbp_f_depth--) { \
                    if (rbp_left_get(a_type, a_field, \
                      rbp_f_path[rbp_f_depth-1]) \
                      == rbp_f_path[rbp_f_depth]) { \
                        break; \
                    } \
                } \
            } \
        } \
    } \
}

#define rb_foreach_reverse_begin(a_type, a_field, a_tree, a_var) { \
    rbp_compute_fr_height(a_type, a_field, a_tree) \
    { \
        /* Initialize the path to contain the right spine. */ \
        a_type *rbp_fr_path[rbp_fr_height]; \
        a_type *rbp_fr_node; \
        bool rbp_fr_synced = false; \
        unsigned rbp_fr_depth = 0; \
        if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
            rbp_fr_path[rbp_fr_depth] = (a_tree)->rbt_root; \
            rbp_fr_depth++; \
            while ((rbp_fr_node = rbp_right_get(a_type, a_field, \
              rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \
                rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
                rbp_fr_depth++; \
            } \
        } \
        /* While the path is non-empty, iterate. */ \
        while (rbp_fr_depth > 0) { \
            (a_var) = rbp_fr_path[rbp_fr_depth-1];

/* Only use if modifying the tree during iteration. */
#define rb_foreach_reverse_prev(a_type, a_field, a_cmp, a_tree, a_node) \
            /* Re-initialize the path to contain the path to a_node. */ \
            rbp_fr_depth = 0; \
            if (a_node != NULL) { \
                if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
                    rbp_fr_path[rbp_fr_depth] = (a_tree)->rbt_root; \
                    rbp_fr_depth++; \
                    rbp_fr_node = rbp_fr_path[0]; \
                    while (true) { \
                        int rbp_fr_cmp = (a_cmp)((a_node), \
                          rbp_fr_path[rbp_fr_depth-1]); \
                        if (rbp_fr_cmp < 0) { \
                            rbp_fr_node = rbp_left_get(a_type, a_field, \
                              rbp_fr_path[rbp_fr_depth-1]); \
                        } else if (rbp_fr_cmp > 0) { \
                            rbp_fr_node = rbp_right_get(a_type, a_field, \
                              rbp_fr_path[rbp_fr_depth-1]); \
                        } else { \
                            break; \
                        } \
                        assert(rbp_fr_node != &(a_tree)->rbt_nil); \
                        rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
                        rbp_fr_depth++; \
                    } \
                } \
            } \
            rbp_fr_synced = true;

#define rb_foreach_reverse_end(a_type, a_field, a_tree, a_var) \
            if (rbp_fr_synced) { \
                rbp_fr_synced = false; \
                continue; \
            } \
            if (rbp_fr_depth == 0) { \
                /* rb_foreach_reverse_sync() was called with a NULL */ \
                /* a_node. */ \
                break; \
            } \
            /* Find the predecessor. */ \
            if ((rbp_fr_node = rbp_left_get(a_type, a_field, \
              rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \
                /* The predecessor is the right-most node in the left */ \
                /* subtree. */ \
                rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
                rbp_fr_depth++; \
                while ((rbp_fr_node = rbp_right_get(a_type, a_field, \
                  rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \
                    rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
                    rbp_fr_depth++; \
                } \
            } else { \
                /* The predecessor is above the current node.  Unwind */ \
                /* until a right-leaning edge is removed from the */ \
                /* path, or the path is empty. */ \
                for (rbp_fr_depth--; rbp_fr_depth > 0; rbp_fr_depth--) { \
                    if (rbp_right_get(a_type, a_field, \
                      rbp_fr_path[rbp_fr_depth-1]) \
                      == rbp_fr_path[rbp_fr_depth]) { \
                        break; \
                    } \
                } \
            } \
        } \
    } \
}

#endif /* RB_H_ */


Revision: 2154