// This may look like C code, but it is really -*- C++ -*-
/*
Copyright (C) 1988 Free Software Foundation
    written by Doug Lea (dl@rocky.oswego.edu)

This file is part of GNU CC.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY. No author or distributor
accepts responsibility to anyone for the consequences of using it
or for whether it serves any particular purpose or works at all,
unless he says so in writing. Refer to the GNU CC General Public
License for full details.

Everyone is granted permission to copy, modify and redistribute
GNU CC, but only under the conditions described in the
GNU CC General Public License. A copy of this license is
supposed to have been given to you along with GNU CC so you
can know your rights and responsibilities. It should be in a
file named COPYING. Among other things, the copyright notice
and this notice must be preserved on all copies.
*/

#ifdef __GNUG__
#pragma implementation
#endif
#include <stream.h>
#include <assert.h>
#include "<T>.<C>.AVLMap.h"


/*
 constants & inlines for maintaining balance & thread status in tree nodes
*/

#define AVLBALANCEMASK    3
#define AVLBALANCED       0
#define AVLLEFTHEAVY      1
#define AVLRIGHTHEAVY     2

#define LTHREADBIT        4
#define RTHREADBIT        8

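/*
  Each node's `stat' field packs the per-node bookkeeping into one word:
  the low two bits (AVLBALANCEMASK) hold the balance factor (AVLBALANCED,
  AVLLEFTHEAVY, or AVLRIGHTHEAVY), LTHREADBIT marks the left pointer as a
  thread rather than a child link, and RTHREADBIT does the same for the
  right pointer.  The inlines below are the only code that reads or
  writes these bits directly.
*/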

static inline int bf(<T><C>AVLNode* t)
{
  return t->stat & AVLBALANCEMASK;
}

static inline void set_bf(<T><C>AVLNode* t, int b)
{
  t->stat = (t->stat & ~AVLBALANCEMASK) | (b & AVLBALANCEMASK);
}


static inline int rthread(<T><C>AVLNode* t)
{
  return t->stat & RTHREADBIT;
}

static inline void set_rthread(<T><C>AVLNode* t, int b)
{
  if (b)
    t->stat |= RTHREADBIT;
  else
    t->stat &= ~RTHREADBIT;
}

static inline int lthread(<T><C>AVLNode* t)
{
  return t->stat & LTHREADBIT;
}

static inline void set_lthread(<T><C>AVLNode* t, int b)
{
  if (b)
    t->stat |= LTHREADBIT;
  else
    t->stat &= ~LTHREADBIT;
}

/*
 traversal primitives
*/

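/*
  The tree is threaded: where a node has no left (right) child, its lt
  (rt) pointer normally refers to its inorder predecessor (successor)
  and the corresponding thread bit is set; the leftmost and rightmost
  nodes keep a 0 there instead.  That lets succ() and pred() walk the
  map in order without parent pointers or an explicit stack.
*/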
<T><C>AVLNode* <T><C>AVLMap::leftmost()
{
  <T><C>AVLNode* t = root;
  if (t != 0) while (t->lt != 0) t = t->lt;
  return t;
}

<T><C>AVLNode* <T><C>AVLMap::rightmost()
{
  <T><C>AVLNode* t = root;
  if (t != 0) while (t->rt != 0) t = t->rt;
  return t;
}

<T><C>AVLNode* <T><C>AVLMap::succ(<T><C>AVLNode* t)
{
  <T><C>AVLNode* r = t->rt;
  if (!rthread(t)) while (!lthread(r)) r = r->lt;
  return r;
}

<T><C>AVLNode* <T><C>AVLMap::pred(<T><C>AVLNode* t)
{
  <T><C>AVLNode* l = t->lt;
  if (!lthread(t)) while (!rthread(l)) l = l->rt;
  return l;
}


Pix <T><C>AVLMap::seek(<T&> key)
{
  <T><C>AVLNode* t = root;
  if (t == 0)
    return 0;
  for (;;)
  {
    int cmp = <T>CMP(key, t->item);
    if (cmp == 0)
      return Pix(t);
    else if (cmp < 0)
    {
      if (lthread(t))
        return 0;
      else
        t = t->lt;
    }
    else if (rthread(t))
      return 0;
    else
      t = t->rt;
  }
}

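/*
  seek() descends the tree comparing keys; a set thread bit plays the
  role a null child pointer would play in an unthreaded tree, so
  reaching one means the key is absent.  Usage sketch, assuming an
  expanded instance m of this map type:

    Pix p = m.seek(k);
    if (p != 0)
      ... m.contents(p) ...    // k is present; a 0 Pix means not found
*/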

/*
  The combination of threads and AVL bits makes adding & deleting
  interesting, but very awkward.

  We use the following statics to avoid passing this state around
  through the recursive calls.
*/

static int _need_rebalancing;       // to send back balance info from rec. calls
static <T>* _target_item;           // add/del_item target
static <T><C>AVLNode* _found_node;  // returned added/deleted node
static int _already_found;          // for deletion subcases

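// Note: because _add() and _del() communicate through these file-scope
// statics instead of extra parameters, they are not reentrant; a map
// should not be modified recursively or concurrently.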

void <T><C>AVLMap::_add(<T><C>AVLNode*& t)
{
  int cmp = <T>CMP(*_target_item, t->item);
  if (cmp == 0)
  {
    _found_node = t;
    return;
  }
  else if (cmp < 0)
  {
    if (lthread(t))
    {
      ++count;
      _found_node = new <T><C>AVLNode(*_target_item, def);
      set_lthread(_found_node, 1);
      set_rthread(_found_node, 1);
      _found_node->lt = t->lt;
      _found_node->rt = t;
      t->lt = _found_node;
      set_lthread(t, 0);
      _need_rebalancing = 1;
    }
    else
      _add(t->lt);
    if (_need_rebalancing)
    {
      switch (bf(t))
      {
      case AVLRIGHTHEAVY:
        set_bf(t, AVLBALANCED);
        _need_rebalancing = 0;
        return;
      case AVLBALANCED:
        set_bf(t, AVLLEFTHEAVY);
        return;
      case AVLLEFTHEAVY:
        <T><C>AVLNode* l = t->lt;
        if (bf(l) == AVLLEFTHEAVY)
        {
          if (rthread(l))
            t->lt = l;
          else
            t->lt = l->rt;
          set_lthread(t, rthread(l));
          l->rt = t;
          set_rthread(l, 0);
          set_bf(t, AVLBALANCED);
          set_bf(l, AVLBALANCED);
          t = l;
          _need_rebalancing = 0;
        }
        else
        {
          <T><C>AVLNode* r = l->rt;
          set_rthread(l, lthread(r));
          if (lthread(r))
            l->rt = r;
          else
            l->rt = r->lt;
          r->lt = l;
          set_lthread(r, 0);
          set_lthread(t, rthread(r));
          if (rthread(r))
            t->lt = r;
          else
            t->lt = r->rt;
          r->rt = t;
          set_rthread(r, 0);
          if (bf(r) == AVLLEFTHEAVY)
            set_bf(t, AVLRIGHTHEAVY);
          else
            set_bf(t, AVLBALANCED);
          if (bf(r) == AVLRIGHTHEAVY)
            set_bf(l, AVLLEFTHEAVY);
          else
            set_bf(l, AVLBALANCED);
          set_bf(r, AVLBALANCED);
          t = r;
          _need_rebalancing = 0;
          return;
        }
      }
    }
  }
  else
  {
    if (rthread(t))
    {
      ++count;
      _found_node = new <T><C>AVLNode(*_target_item, def);
      set_rthread(t, 0);
      set_lthread(_found_node, 1);
      set_rthread(_found_node, 1);
      _found_node->lt = t;
      _found_node->rt = t->rt;
      t->rt = _found_node;
      _need_rebalancing = 1;
    }
    else
      _add(t->rt);
    if (_need_rebalancing)
    {
      switch (bf(t))
      {
      case AVLLEFTHEAVY:
        set_bf(t, AVLBALANCED);
        _need_rebalancing = 0;
        return;
      case AVLBALANCED:
        set_bf(t, AVLRIGHTHEAVY);
        return;
      case AVLRIGHTHEAVY:
        <T><C>AVLNode* r = t->rt;
        if (bf(r) == AVLRIGHTHEAVY)
        {
          if (lthread(r))
            t->rt = r;
          else
            t->rt = r->lt;
          set_rthread(t, lthread(r));
          r->lt = t;
          set_lthread(r, 0);
          set_bf(t, AVLBALANCED);
          set_bf(r, AVLBALANCED);
          t = r;
          _need_rebalancing = 0;
        }
        else
        {
          <T><C>AVLNode* l = r->lt;
          set_lthread(r, rthread(l));
          if (rthread(l))
            r->lt = l;
          else
            r->lt = l->rt;
          l->rt = r;
          set_rthread(l, 0);
          set_rthread(t, lthread(l));
          if (lthread(l))
            t->rt = l;
          else
            t->rt = l->lt;
          l->lt = t;
          set_lthread(l, 0);
          if (bf(l) == AVLRIGHTHEAVY)
            set_bf(t, AVLLEFTHEAVY);
          else
            set_bf(t, AVLBALANCED);
          if (bf(l) == AVLLEFTHEAVY)
            set_bf(r, AVLRIGHTHEAVY);
          else
            set_bf(r, AVLBALANCED);
          set_bf(l, AVLBALANCED);
          t = l;
          _need_rebalancing = 0;
          return;
        }
      }
    }
  }
}

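/*
  _add() splices a new node into both the tree and the thread chain, then
  rebalances on the way back out of the recursion: a subtree that becomes
  doubly left-heavy gets either a single right rotation (when its left
  child is itself left-heavy) or a left-right double rotation, with the
  mirror-image cases on the right side.  Every pointer that moves during
  a rotation has its thread bit fixed up as well, so the inorder
  threading stays consistent.
*/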

<C>& <T><C>AVLMap::operator [] (<T&> item)
{
  if (root == 0)
  {
    ++count;
    root = new <T><C>AVLNode(item, def);
    set_rthread(root, 1);
    set_lthread(root, 1);
    return root->cont;
  }
  else
  {
    _target_item = &item;
    _need_rebalancing = 0;
    _add(root);
    return _found_node->cont;
  }
}

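/*
  operator[] is insert-or-find: it returns a reference to the contents
  slot for item, creating a new node whose contents start out as def
  (the default supplied when the map was constructed) if the key is not
  already present.  Usage sketch, assuming a genclass-expanded instance
  type named intintAVLMap:

    intintAVLMap m(0);   // 0 is the default contents
    m[3] = 7;            // inserts key 3
    m[3] += 1;           // finds the same slot; now 8
*/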

void <T><C>AVLMap::_del(<T><C>AVLNode* par, <T><C>AVLNode*& t)
{
  int comp;
  if (_already_found)
  {
    if (rthread(t))
      comp = 0;
    else
      comp = 1;
  }
  else
    comp = <T>CMP(*_target_item, t->item);
  if (comp == 0)
  {
    if (lthread(t) && rthread(t))
    {
      _found_node = t;
      if (t == par->lt)
      {
        set_lthread(par, 1);
        par->lt = t->lt;
      }
      else
      {
        set_rthread(par, 1);
        par->rt = t->rt;
      }
      _need_rebalancing = 1;
      return;
    }
    else if (lthread(t))
    {
      _found_node = t;
      <T><C>AVLNode* s = succ(t);
      if (s != 0 && lthread(s))
        s->lt = t->lt;
      t = t->rt;
      _need_rebalancing = 1;
      return;
    }
    else if (rthread(t))
    {
      _found_node = t;
      <T><C>AVLNode* p = pred(t);
      if (p != 0 && rthread(p))
        p->rt = t->rt;
      t = t->lt;
      _need_rebalancing = 1;
      return;
    }
    else                        // replace item & find someone deletable
    {
      <T><C>AVLNode* p = pred(t);
      t->item = p->item;
      t->cont = p->cont;
      _already_found = 1;
      comp = -1;                // fall through below to left
    }
  }

  if (comp < 0)
  {
    if (lthread(t))
      return;
    _del(t, t->lt);
    if (!_need_rebalancing)
      return;
    switch (bf(t))
    {
    case AVLLEFTHEAVY:
      set_bf(t, AVLBALANCED);
      return;
    case AVLBALANCED:
      set_bf(t, AVLRIGHTHEAVY);
      _need_rebalancing = 0;
      return;
    case AVLRIGHTHEAVY:
      <T><C>AVLNode* r = t->rt;
      switch (bf(r))
      {
      case AVLBALANCED:
        if (lthread(r))
          t->rt = r;
        else
          t->rt = r->lt;
        set_rthread(t, lthread(r));
        r->lt = t;
        set_lthread(r, 0);
        set_bf(t, AVLRIGHTHEAVY);
        set_bf(r, AVLLEFTHEAVY);
        _need_rebalancing = 0;
        t = r;
        return;
      case AVLRIGHTHEAVY:
        if (lthread(r))
          t->rt = r;
        else
          t->rt = r->lt;
        set_rthread(t, lthread(r));
        r->lt = t;
        set_lthread(r, 0);
        set_bf(t, AVLBALANCED);
        set_bf(r, AVLBALANCED);
        t = r;
        return;
      case AVLLEFTHEAVY:
        <T><C>AVLNode* l = r->lt;
        set_lthread(r, rthread(l));
        if (rthread(l))
          r->lt = l;
        else
          r->lt = l->rt;
        l->rt = r;
        set_rthread(l, 0);
        set_rthread(t, lthread(l));
        if (lthread(l))
          t->rt = l;
        else
          t->rt = l->lt;
        l->lt = t;
        set_lthread(l, 0);
        if (bf(l) == AVLRIGHTHEAVY)
          set_bf(t, AVLLEFTHEAVY);
        else
          set_bf(t, AVLBALANCED);
        if (bf(l) == AVLLEFTHEAVY)
          set_bf(r, AVLRIGHTHEAVY);
        else
          set_bf(r, AVLBALANCED);
        set_bf(l, AVLBALANCED);
        t = l;
        return;
      }
    }
  }
  else
  {
    if (rthread(t))
      return;
    _del(t, t->rt);
    if (!_need_rebalancing)
      return;
    switch (bf(t))
    {
    case AVLRIGHTHEAVY:
      set_bf(t, AVLBALANCED);
      return;
    case AVLBALANCED:
      set_bf(t, AVLLEFTHEAVY);
      _need_rebalancing = 0;
      return;
    case AVLLEFTHEAVY:
      <T><C>AVLNode* l = t->lt;
      switch (bf(l))
      {
      case AVLBALANCED:
        if (rthread(l))
          t->lt = l;
        else
          t->lt = l->rt;
        set_lthread(t, rthread(l));
        l->rt = t;
        set_rthread(l, 0);
        set_bf(t, AVLLEFTHEAVY);
        set_bf(l, AVLRIGHTHEAVY);
        _need_rebalancing = 0;
        t = l;
        return;
      case AVLLEFTHEAVY:
        if (rthread(l))
          t->lt = l;
        else
          t->lt = l->rt;
        set_lthread(t, rthread(l));
        l->rt = t;
        set_rthread(l, 0);
        set_bf(t, AVLBALANCED);
        set_bf(l, AVLBALANCED);
        t = l;
        return;
      case AVLRIGHTHEAVY:
        <T><C>AVLNode* r = l->rt;
        set_rthread(l, lthread(r));
        if (lthread(r))
          l->rt = r;
        else
          l->rt = r->lt;
        r->lt = l;
        set_lthread(r, 0);
        set_lthread(t, rthread(r));
        if (rthread(r))
          t->lt = r;
        else
          t->lt = r->rt;
        r->rt = t;
        set_rthread(r, 0);
        if (bf(r) == AVLLEFTHEAVY)
          set_bf(t, AVLRIGHTHEAVY);
        else
          set_bf(t, AVLBALANCED);
        if (bf(r) == AVLRIGHTHEAVY)
          set_bf(l, AVLLEFTHEAVY);
        else
          set_bf(l, AVLBALANCED);
        set_bf(r, AVLBALANCED);
        t = r;
        return;
      }
    }
  }
}

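/*
  _del() handles three structural cases once the target is found: a node
  with two threads is simply unlinked from its parent; a node with one
  real child is replaced by that child after the neighboring thread is
  repaired; and an interior node with two children copies its inorder
  predecessor's item and contents into itself, then continues down (with
  _already_found set) to delete the predecessor node instead.  The
  rebalancing cases mirror those in _add(), but since a deletion can
  shorten a subtree, rebalancing may have to propagate all the way back
  to the root.
*/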


void <T><C>AVLMap::del(<T&> item)
{
  if (root == 0) return;
  _need_rebalancing = 0;
  _already_found = 0;
  _found_node = 0;
  _target_item = &item;
  _del(root, root);
  if (_found_node)
  {
    delete _found_node;
    if (--count == 0)
      root = 0;
  }
}

void <T><C>AVLMap::_kill(<T><C>AVLNode* t)
{
  if (t != 0)
  {
    if (!lthread(t)) _kill(t->lt);
    if (!rthread(t)) _kill(t->rt);
    delete t;
  }
}

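// _kill() frees a whole subtree; the thread-bit tests keep it from
// following thread pointers, which lead to nodes that are not children.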

<T><C>AVLMap::<T><C>AVLMap(<T><C>AVLMap& b) : <T><C>Map(b.def)
{
  root = 0;
  count = 0;
  for (Pix i = b.first(); i != 0; b.next(i))
    (*this)[b.key(i)] = b.contents(i);
}

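// The copy constructor rebuilds the map by inserting b's (key, contents)
// pairs in order via operator[]; the copy holds the same pairs, though
// the new tree's shape need not match b's exactly.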

int <T><C>AVLMap::OK()
{
  int v = 1;
  if (root == 0)
    v = count == 0;
  else
  {
    int n = 1;
    <T><C>AVLNode* trail = leftmost();
    <T><C>AVLNode* t = succ(trail);
    while (t != 0)
    {
      ++n;
      v &= <T>CMP(trail->item, t->item) < 0;
      trail = t;
      t = succ(t);
    }
    v &= n == count;
  }
  if (!v) error("invariant failure");
  return v;
}
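
/*
  OK() is the consistency check: it walks the map inorder through the
  threads, verifying that the keys are strictly increasing and that the
  number of reachable nodes equals count, and calls error() if either
  test fails.  Handy while debugging, e.g. assert(m.OK()) after a batch
  of insertions and deletions.
*/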