modules/up/src/gnug++/CacheKey.CacheValue.AVLMap.cc


FUNCTIONS

This source file includes the following functions.
  1. bf
  2. set_bf
  3. rthread
  4. set_rthread
  5. lthread
  6. set_lthread
  7. leftmost
  8. rightmost
  9. succ
  10. pred
  11. seek
  12. _add
  13. _del
  14. del
  15. _kill
  16. CacheKeyCacheValueAVLMap
  17. OK

// This may look like C code, but it is really -*- C++ -*-
/*
Copyright (C) 1988 Free Software Foundation
    written by Doug Lea (dl@rocky.oswego.edu)

This file is part of the GNU C++ Library.  This library is free
software; you can redistribute it and/or modify it under the terms of
the GNU Library General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your
option) any later version.  This library is distributed in the hope
that it will be useful, but WITHOUT ANY WARRANTY; without even the
implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE.  See the GNU Library General Public License for more details.
You should have received a copy of the GNU Library General Public
License along with this library; if not, write to the Free Software
Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/

#ifdef __GNUG__
#pragma implementation
#endif
#include "CacheKey.CacheValue.AVLMap.h"


/*
 constants & inlines for maintaining balance & thread status in tree nodes
*/

#define AVLBALANCEMASK    3
#define AVLBALANCED       0
#define AVLLEFTHEAVY      1
#define AVLRIGHTHEAVY     2

#define LTHREADBIT        4
#define RTHREADBIT        8


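/*
 Each node's `stat' field packs three things into one byte: the AVL
 balance factor in the low two bits (AVLBALANCEMASK) and the left/right
 "thread" flags in bits 2 and 3 (LTHREADBIT, RTHREADBIT).  When a thread
 bit is set, the corresponding link is not a real child but a thread:
 lt points to the node's in-order predecessor and rt to its in-order
 successor, which is what lets succ()/pred() below run without a stack.
*/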
static inline int bf(CacheKeyCacheValueAVLNode* t)
{
  return t->stat & AVLBALANCEMASK;
}

static inline void set_bf(CacheKeyCacheValueAVLNode* t, int b)
{
  t->stat = (t->stat & ~AVLBALANCEMASK) | (b & AVLBALANCEMASK);
}


static inline int rthread(CacheKeyCacheValueAVLNode* t)
{
  return t->stat & RTHREADBIT;
}

static inline void set_rthread(CacheKeyCacheValueAVLNode* t, int b)
{
  if (b)
    t->stat |= RTHREADBIT;
  else
    t->stat &= ~RTHREADBIT;
}

static inline int lthread(CacheKeyCacheValueAVLNode* t)
{
  return t->stat & LTHREADBIT;
}

static inline void set_lthread(CacheKeyCacheValueAVLNode* t, int b)
{
  if (b)
    t->stat |= LTHREADBIT;
  else
    t->stat &= ~LTHREADBIT;
}

/*
 traversal primitives
*/


CacheKeyCacheValueAVLNode* CacheKeyCacheValueAVLMap::leftmost()
{
  CacheKeyCacheValueAVLNode* t = root;
  if (t != 0) while (t->lt != 0) t = t->lt;
  return t;
}

CacheKeyCacheValueAVLNode* CacheKeyCacheValueAVLMap::rightmost()
{
  CacheKeyCacheValueAVLNode* t = root;
  if (t != 0) while (t->rt != 0) t = t->rt;
  return t;
}

CacheKeyCacheValueAVLNode* CacheKeyCacheValueAVLMap::succ(CacheKeyCacheValueAVLNode* t)
{
  CacheKeyCacheValueAVLNode* r = t->rt;
  if (!rthread(t)) while (!lthread(r)) r = r->lt;
  return r;
}

CacheKeyCacheValueAVLNode* CacheKeyCacheValueAVLMap::pred(CacheKeyCacheValueAVLNode* t)
{
  CacheKeyCacheValueAVLNode* l = t->lt;
  if (!lthread(t)) while (!rthread(l)) l = l->rt;
  return l;
}

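/*
 In-order traversal of the whole map is just leftmost() plus repeated
 succ() calls, exactly as OK() does further down; succ() follows the
 right thread when there is one, and otherwise descends to the leftmost
 node of the right subtree.  A sketch:

   for (CacheKeyCacheValueAVLNode* n = leftmost(); n != 0; n = succ(n))
     ;  // n->item / n->cont visited in ascending key order
*/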

Pix CacheKeyCacheValueAVLMap::seek(CacheKey  key)
{
  CacheKeyCacheValueAVLNode* t = root;
  if (t == 0)
    return 0;
  for (;;)
  {
    int cmp = CacheKeyCMP(key, t->item);
    if (cmp == 0)
      return Pix(t);
    else if (cmp < 0)
    {
      if (lthread(t))
        return 0;
      else
        t = t->lt;
    }
    else if (rthread(t))
      return 0;
    else
      t = t->rt;
  }
}

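/*
 seek() is a plain binary search that treats a set thread bit as "no
 child on this side".  A sketch of typical use, where m is a
 CacheKeyCacheValueAVLMap and k a CacheKey, assuming the key()/contents()
 Pix accessors declared in the header (they are used by the copy
 constructor below):

   Pix p = m.seek(k);
   if (p != 0)
     ;  // m.contents(p) is the value stored under k
*/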

/*
 The combination of threads and AVL bits makes adding & deleting
 interesting, but very awkward.

 We use the following statics to avoid passing them around recursively.
*/

static int _need_rebalancing;   // to send back balance info from rec. calls
static CacheKey*   _target_item;     // add/del_item target
static CacheKeyCacheValueAVLNode* _found_node; // returned added/deleted node
static int    _already_found;   // for deletion subcases


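/*
 _add() descends from the root comparing *_target_item with each node.
 When the search hits a thread link, a new node is spliced in with both
 of its own links threaded, and _need_rebalancing is set so that the
 unwinding recursion can update balance factors and apply the usual
 single or double AVL rotations, written here so as to keep the thread
 bits consistent.  Because _add() and _del() communicate through the
 file-scope statics above, the map is not reentrant.
*/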
void CacheKeyCacheValueAVLMap:: _add(CacheKeyCacheValueAVLNode*& t)
{
  int cmp = CacheKeyCMP(*_target_item, t->item);
  if (cmp == 0)
  {
    _found_node = t;
    return;
  }
  else if (cmp < 0)
  {
    if (lthread(t))
    {
      ++count;
      _found_node = new CacheKeyCacheValueAVLNode(*_target_item, def);
      set_lthread(_found_node, 1);
      set_rthread(_found_node, 1);
      _found_node->lt = t->lt;
      _found_node->rt = t;
      t->lt = _found_node;
      set_lthread(t, 0);
      _need_rebalancing = 1;
    }
    else
      _add(t->lt);
    if (_need_rebalancing)
    {
      switch(bf(t))
      {
      case AVLRIGHTHEAVY:
        set_bf(t, AVLBALANCED);
        _need_rebalancing = 0;
        return;
      case AVLBALANCED:
        set_bf(t, AVLLEFTHEAVY);
        return;
      case AVLLEFTHEAVY:
        {
        CacheKeyCacheValueAVLNode* l = t->lt;
        if (bf(l) == AVLLEFTHEAVY)
        {
          if (rthread(l))
            t->lt = l;
          else
            t->lt = l->rt;
          set_lthread(t, rthread(l));
          l->rt = t;
          set_rthread(l, 0);
          set_bf(t, AVLBALANCED);
          set_bf(l, AVLBALANCED);
          t = l;
          _need_rebalancing = 0;
        }
        else
        {
          CacheKeyCacheValueAVLNode* r = l->rt;
          set_rthread(l, lthread(r));
          if (lthread(r))
            l->rt = r;
          else
            l->rt = r->lt;
          r->lt = l;
          set_lthread(r, 0);
          set_lthread(t, rthread(r));
          if (rthread(r))
            t->lt = r;
          else
            t->lt = r->rt;
          r->rt = t;
          set_rthread(r, 0);
          if (bf(r) == AVLLEFTHEAVY)
            set_bf(t, AVLRIGHTHEAVY);
          else
            set_bf(t, AVLBALANCED);
          if (bf(r) == AVLRIGHTHEAVY)
            set_bf(l, AVLLEFTHEAVY);
          else
            set_bf(l, AVLBALANCED);
          set_bf(r, AVLBALANCED);
          t = r;
          _need_rebalancing = 0;
          return;
        }
        }
      }
    }
  }
  else
  {
    if (rthread(t))
    {
      ++count;
      _found_node = new CacheKeyCacheValueAVLNode(*_target_item, def);
      set_rthread(t, 0);
      set_lthread(_found_node, 1);
      set_rthread(_found_node, 1);
      _found_node->lt = t;
      _found_node->rt = t->rt;
      t->rt = _found_node;
      _need_rebalancing = 1;
    }
    else
      _add(t->rt);
    if (_need_rebalancing)
    {
      switch(bf(t))
      {
      case AVLLEFTHEAVY:
        set_bf(t, AVLBALANCED);
        _need_rebalancing = 0;
        return;
      case AVLBALANCED:
        set_bf(t, AVLRIGHTHEAVY);
        return;
      case AVLRIGHTHEAVY:
        {
        CacheKeyCacheValueAVLNode* r = t->rt;
        if (bf(r) == AVLRIGHTHEAVY)
        {
          if (lthread(r))
            t->rt = r;
          else
            t->rt = r->lt;
          set_rthread(t, lthread(r));
          r->lt = t;
          set_lthread(r, 0);
          set_bf(t, AVLBALANCED);
          set_bf(r, AVLBALANCED);
          t = r;
          _need_rebalancing = 0;
        }
        else
        {
          CacheKeyCacheValueAVLNode* l = r->lt;
          set_lthread(r, rthread(l));
          if (rthread(l))
            r->lt = l;
          else
            r->lt = l->rt;
          l->rt = r;
          set_rthread(l, 0);
          set_rthread(t, lthread(l));
          if (lthread(l))
            t->rt = l;
          else
            t->rt = l->lt;
          l->lt = t;
          set_lthread(l, 0);
          if (bf(l) == AVLRIGHTHEAVY)
            set_bf(t, AVLLEFTHEAVY);
          else
            set_bf(t, AVLBALANCED);
          if (bf(l) == AVLLEFTHEAVY)
            set_bf(r, AVLRIGHTHEAVY);
          else
            set_bf(r, AVLBALANCED);
          set_bf(l, AVLBALANCED);
          t = l;
          _need_rebalancing = 0;
          return;
        }
        }
      }
    }
  }
}


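/*
 operator[] inserts `item' with the default value `def' when it is not
 yet present and, either way, returns a reference to the stored value,
 so `m[k] = v' serves as both insert and update.
*/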
CacheValue& CacheKeyCacheValueAVLMap::operator [] (CacheKey  item)
{
  if (root == 0)
  {
    ++count;
    root = new CacheKeyCacheValueAVLNode(item, def);
    set_rthread(root, 1);
    set_lthread(root, 1);
    return root->cont;
  }
  else
  {
    _target_item = &item;
    _need_rebalancing = 0;
    _add(root);
    return _found_node->cont;
  }
}


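/*
 _del() removes the node matching *_target_item from the subtree rooted
 at t (par is t's parent).  A node with one or two threaded links is
 unlinked directly, repairing its neighbours' threads; a node with two
 real children instead has its item and contents overwritten by its
 in-order predecessor, and the search then continues down the left
 subtree (_already_found) to delete that predecessor node.  As in
 _add(), the unwinding recursion rebalances with thread-aware rotations
 whenever a subtree has become shorter.
*/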
void CacheKeyCacheValueAVLMap::_del(CacheKeyCacheValueAVLNode* par, CacheKeyCacheValueAVLNode*& t)
{
  int comp;
  if (_already_found)
  {
    if (rthread(t))
      comp = 0;
    else
      comp = 1;
  }
  else
    comp = CacheKeyCMP(*_target_item, t->item);
  if (comp == 0)
  {
    if (lthread(t) && rthread(t))
    {
      _found_node = t;
      if (t == par->lt)
      {
        set_lthread(par, 1);
        par->lt = t->lt;
      }
      else
      {
        set_rthread(par, 1);
        par->rt = t->rt;
      }
      _need_rebalancing = 1;
      return;
    }
    else if (lthread(t))
    {
      _found_node = t;
      CacheKeyCacheValueAVLNode* s = succ(t);
      if (s != 0 && lthread(s))
        s->lt = t->lt;
      t = t->rt;
      _need_rebalancing = 1;
      return;
    }
    else if (rthread(t))
    {
      _found_node = t;
      CacheKeyCacheValueAVLNode* p = pred(t);
      if (p != 0 && rthread(p))
        p->rt = t->rt;
      t = t->lt;
      _need_rebalancing = 1;
      return;
    }
    else                        // replace item & find someone deletable
    {
      CacheKeyCacheValueAVLNode* p = pred(t);
      t->item = p->item;
      t->cont = p->cont;
      _already_found = 1;
      comp = -1;                // fall through below to left
    }
  }

  if (comp < 0)
  {
    if (lthread(t))
      return;
    _del(t, t->lt);
    if (!_need_rebalancing)
      return;
    switch (bf(t))
    {
    case AVLLEFTHEAVY:
      set_bf(t, AVLBALANCED);
      return;
    case AVLBALANCED:
      set_bf(t, AVLRIGHTHEAVY);
      _need_rebalancing = 0;
      return;
    case AVLRIGHTHEAVY:
      {
      CacheKeyCacheValueAVLNode* r = t->rt;
      switch (bf(r))
      {
      case AVLBALANCED:
        if (lthread(r))
          t->rt = r;
        else
          t->rt = r->lt;
        set_rthread(t, lthread(r));
        r->lt = t;
        set_lthread(r, 0);
        set_bf(t, AVLRIGHTHEAVY);
        set_bf(r, AVLLEFTHEAVY);
        _need_rebalancing = 0;
        t = r;
        return;
      case AVLRIGHTHEAVY:
        if (lthread(r))
          t->rt = r;
        else
          t->rt = r->lt;
        set_rthread(t, lthread(r));
        r->lt = t;
        set_lthread(r, 0);
        set_bf(t, AVLBALANCED);
        set_bf(r, AVLBALANCED);
        t = r;
        return;
      case AVLLEFTHEAVY:
        {
        CacheKeyCacheValueAVLNode* l = r->lt;
        set_lthread(r, rthread(l));
        if (rthread(l))
          r->lt = l;
        else
          r->lt = l->rt;
        l->rt = r;
        set_rthread(l, 0);
        set_rthread(t, lthread(l));
        if (lthread(l))
          t->rt = l;
        else
          t->rt = l->lt;
        l->lt = t;
        set_lthread(l, 0);
        if (bf(l) == AVLRIGHTHEAVY)
          set_bf(t, AVLLEFTHEAVY);
        else
          set_bf(t, AVLBALANCED);
        if (bf(l) == AVLLEFTHEAVY)
          set_bf(r, AVLRIGHTHEAVY);
        else
          set_bf(r, AVLBALANCED);
        set_bf(l, AVLBALANCED);
        t = l;
        return;
        }
      }
    }
    }
  }
  else
  {
    if (rthread(t))
      return;
    _del(t, t->rt);
    if (!_need_rebalancing)
      return;
    switch (bf(t))
    {
    case AVLRIGHTHEAVY:
      set_bf(t, AVLBALANCED);
      return;
    case AVLBALANCED:
      set_bf(t, AVLLEFTHEAVY);
      _need_rebalancing = 0;
      return;
    case AVLLEFTHEAVY:
      {
      CacheKeyCacheValueAVLNode* l = t->lt;
      switch (bf(l))
      {
      case AVLBALANCED:
        if (rthread(l))
          t->lt = l;
        else
          t->lt = l->rt;
        set_lthread(t, rthread(l));
        l->rt = t;
        set_rthread(l, 0);
        set_bf(t, AVLLEFTHEAVY);
        set_bf(l, AVLRIGHTHEAVY);
        _need_rebalancing = 0;
        t = l;
        return;
      case AVLLEFTHEAVY:
        if (rthread(l))
          t->lt = l;
        else
          t->lt = l->rt;
        set_lthread(t, rthread(l));
        l->rt = t;
        set_rthread(l, 0);
        set_bf(t, AVLBALANCED);
        set_bf(l, AVLBALANCED);
        t = l;
        return;
      case AVLRIGHTHEAVY:
        {
        CacheKeyCacheValueAVLNode* r = l->rt;
        set_rthread(l, lthread(r));
        if (lthread(r))
          l->rt = r;
        else
          l->rt = r->lt;
        r->lt = l;
        set_lthread(r, 0);
        set_lthread(t, rthread(r));
        if (rthread(r))
          t->lt = r;
        else
          t->lt = r->rt;
        r->rt = t;
        set_rthread(r, 0);
        if (bf(r) == AVLLEFTHEAVY)
          set_bf(t, AVLRIGHTHEAVY);
        else
          set_bf(t, AVLBALANCED);
        if (bf(r) == AVLRIGHTHEAVY)
          set_bf(l, AVLLEFTHEAVY);
        else
          set_bf(l, AVLBALANCED);
        set_bf(r, AVLBALANCED);
        t = r;
        return;
        }
      }
      }
    }
  }
}



void CacheKeyCacheValueAVLMap::del(CacheKey  item)
{
  if (root == 0) return;
  _need_rebalancing = 0;
  _already_found = 0;
  _found_node = 0;
  _target_item = &item;
  _del(root, root);
  if (_found_node)
  {
    delete(_found_node);
    if (--count == 0)
      root = 0;
  }
}

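/*
 _kill() deletes an entire subtree.  It recurses only through real
 child links; thread links are skipped so that every node is deleted
 exactly once.
*/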
void CacheKeyCacheValueAVLMap::_kill(CacheKeyCacheValueAVLNode* t)
{
  if (t != 0)
  {
    if (!lthread(t)) _kill(t->lt);
    if (!rthread(t)) _kill(t->rt);
    delete t;
  }
}


CacheKeyCacheValueAVLMap::CacheKeyCacheValueAVLMap(CacheKeyCacheValueAVLMap& b) :CacheKeyCacheValueMap(b.def)
{
  root = 0;
  count = 0;
  for (Pix i = b.first(); i != 0; b.next(i))
    (*this)[b.key(i)] = b.contents(i);
}


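/*
 OK() verifies the two invariants an in-order walk can check cheaply:
 keys are strictly increasing under CacheKeyCMP, and the number of
 reachable nodes equals `count'.
*/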
int CacheKeyCacheValueAVLMap::OK()
{
  int v = 1;
  if (root == 0)
    v = count == 0;
  else
  {
    int n = 1;
    CacheKeyCacheValueAVLNode* trail = leftmost();
    CacheKeyCacheValueAVLNode* t = succ(trail);
    while (t != 0)
    {
      ++n;
      v &= CacheKeyCMP(trail->item, t->item) < 0;
      trail = t;
      t = succ(t);
    }
    v &= n == count;
  }
  if (!v) error("invariant failure");
  return v;
}
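
/*
 A minimal usage sketch of the map as a whole.  The variable names are
 hypothetical, and the one-argument constructor taking the default
 value is assumed from the header (the copy constructor above passes
 b.def to the base class in the same way):

   CacheKeyCacheValueAVLMap m(dflt);   // dflt is the initial value for new keys
   m[k] = v;                           // insert or overwrite
   Pix p = m.seek(k);                  // 0 if k is not present
   m.del(k);                           // remove k if present
*/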
