/*
 * HAMMER_ALIST.C -
 *
 * Bitmap allocator/deallocator, using a radix tree with hinting.
 * Unlimited-size allocations, power-of-2 only, power-of-2 aligned results
 * only.  This module has been separated from the generic kernel module and
 * written specifically for embedding in HAMMER storage structures.
 *
 * Copyright (c) 2007 The DragonFly Project.  All rights reserved.
 *
 * This code is derived from software contributed to The DragonFly Project
 * by Matthew Dillon <dillon@backplane.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name of The DragonFly Project nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific, prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $DragonFly: src/sys/vfs/hammer/Attic/hammer_alist.c,v 1.2 2007/10/12 18:57:45 dillon Exp $
 */
/*
 * This module implements a generic allocator through the use of a hinted
 * radix tree.  All allocations must be in powers of 2 and will return
 * similarly aligned results.  The radix tree typically recurses within
 * a memory buffer and then continues its recursion by chaining to other
 * memory buffers, creating a seamless whole capable of managing any amount
 * of storage.
 *
 * The radix tree is laid out recursively using a linear array.  Each meta
 * node is immediately followed (laid out sequentially in memory) by
 * HAMMER_ALIST_META_RADIX lower level nodes.  This is a recursive structure
 * but one that can be easily scanned through a very simple 'skip'
 * calculation.
 *
 * The radix tree supports an early-termination optimization which
 * effectively allows us to efficiently mix large and small allocations
 * with a single abstraction.  The address space can be partitioned
 * arbitrarily without adding much in the way of additional meta-storage
 * for the allocator.
 *
 * The radix tree supports allocator layering.  By supplying a base_radix > 1
 * the allocator will issue callbacks to recurse into lower layers once
 * higher layers have been exhausted.  Allocations in multiples of base_radix
 * will be entirely retained in the higher level allocator and never recurse.
 *
 * This code can be compiled stand-alone for debugging.
 */
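
/*
 * Illustrative layout sketch (editor's note, not part of the original
 * commentary), assuming HAMMER_ALIST_META_RADIX is 16: a two-level tree
 * has skip == 17, with the meta root at meta[0] and its sixteen children
 * following it linearly at meta[1] through meta[16].  A child with
 * sub-records of its own is instead followed by its entire subtree before
 * the next sibling begins, which is exactly the stride the 'skip'
 * calculation walks over.
 */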

#ifdef _KERNEL

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/lock.h>
#include <sys/kernel.h>
#include <sys/malloc.h>
#include <vm/vm.h>
#include <vm/vm_object.h>
#include <vm/vm_kern.h>
#include <vm/vm_extern.h>
#include <vm/vm_page.h>

#include "hammer_disk.h"

#else

#ifndef ALIST_NO_DEBUG
#define ALIST_DEBUG
#endif

#include <sys/types.h>
#include <stdio.h>
#include <assert.h>
#include <string.h>
#include <stdlib.h>
#include <stdarg.h>

#define kmalloc(a,b,c)  malloc(a)
#define kfree(a,b)      free(a)
#define kprintf         printf
#define KKASSERT(exp)   assert(exp)
struct malloc_type;

#include "hammer_alist.h"

void panic(const char *ctl, ...);

#endif

/*
 * static support functions
 */

static int32_t hammer_alst_leaf_alloc_fwd(hammer_almeta_t scan,
                                        int32_t blk, int count);
static int32_t hammer_alst_meta_alloc_fwd(hammer_alist_t live,
                                        hammer_almeta_t scan,
                                        int32_t blk, int32_t count,
                                        int32_t radix, int skip);
static int32_t hammer_alst_leaf_alloc_rev(hammer_almeta_t scan,
                                        int32_t blk, int count);
static int32_t hammer_alst_meta_alloc_rev(hammer_alist_t live,
                                        hammer_almeta_t scan,
                                        int32_t blk, int32_t count,
                                        int32_t radix, int skip);
static void hammer_alst_leaf_free(hammer_almeta_t scan,
                                        int32_t relblk, int count);
static void hammer_alst_meta_free(hammer_alist_t live, hammer_almeta_t scan,
                                        int32_t freeBlk, int32_t count,
                                        int32_t radix, int skip, int32_t blk);
static int32_t  hammer_alst_radix_init(hammer_almeta_t scan,
                                        int32_t radix, int skip, int32_t count);
#ifdef ALIST_DEBUG
static void     hammer_alst_radix_print(hammer_alist_t live,
                                        hammer_almeta_t scan, int32_t blk,
                                        int32_t radix, int skip, int tab);
#endif

/*
 * Initialize an a-list config structure for use.  The config structure
 * describes the basic structure of an a-list's topology and may be
 * shared by any number of a-lists which have the same topology.
 *
 * blocks is the total number of blocks, that is the number of blocks
 * handled at this layer multiplied by the base radix.
 *
 * When base_radix != 1 the A-list has only meta elements and does not have
 * any leaves, in order to be able to track partial allocations.
 */
void
hammer_alist_template(hammer_alist_config_t bl, int32_t blocks,
                      int32_t base_radix, int32_t maxmeta)
{
        int radix;
        int skip;

        /*
         * Calculate the radix and skip fields used for scanning.  The leaf
         * nodes in our tree are either BMAP or META nodes depending on
         * whether we chain to a lower level allocation layer or not.
         */
        if (base_radix == 1)
                radix = HAMMER_ALIST_BMAP_RADIX;
        else
                radix = HAMMER_ALIST_META_RADIX;
        skip = 1;

        while (radix < blocks / base_radix) {
                radix *= HAMMER_ALIST_META_RADIX;
                skip = skip * HAMMER_ALIST_META_RADIX + 1;
        }
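
        /*
         * Worked example (editor's illustration): for blocks == 512 and
         * base_radix == 1 the loop above runs once, leaving radix == 512
         * and skip == 17, i.e. a meta root whose sixteen children are
         * terminal bitmap leaves laid out directly behind it.
         */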

        /*
         * Increase the radix based on the number of blocks a lower level
         * allocator is able to handle at the 'base' of our allocator.
         * If base_radix != 1 the caller will have to initialize the callback
         * fields to implement the lower level allocator.
         */
        radix *= base_radix;

        bzero(bl, sizeof(*bl));

        bl->bl_blocks = blocks;
        bl->bl_base_radix = base_radix;
        bl->bl_radix = radix;
        bl->bl_skip = skip;
        bl->bl_rootblks = hammer_alst_radix_init(NULL, bl->bl_radix,
                                                 bl->bl_skip, blocks);
        ++bl->bl_rootblks;      /* one more for freeblks header */
        if (base_radix == 1)
                bl->bl_terminal = 1;
        KKASSERT(maxmeta == 0 || bl->bl_rootblks <= maxmeta);

#if defined(ALIST_DEBUG)
        kprintf(
                "PRIMARY ALIST LAYER manages %d blocks"
                ", requiring %dK (%d bytes) of ram\n",
                bl->bl_blocks / bl->bl_base_radix,
                (bl->bl_rootblks * sizeof(struct hammer_almeta) + 1023) / 1024,
                (bl->bl_rootblks * sizeof(struct hammer_almeta))
        );
        kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
}

void
hammer_alist_init(hammer_alist_t live)
{
        hammer_alist_config_t bl = live->config;

        live->meta->bm_bighint = 0;
        live->meta->bm_alist_freeblks = 0;
        hammer_alst_radix_init(live->meta + 1, bl->bl_radix,
                               bl->bl_skip, bl->bl_blocks);
}

#if !defined(_KERNEL) && defined(ALIST_DEBUG)

/*
 * hammer_alist_create()        (userland only)
 *
 *      Create an alist capable of handling up to the specified number of
 *      blocks.  blocks must be greater than 0.
 *
 *      The smallest alist consists of a single leaf node capable of
 *      managing HAMMER_ALIST_BMAP_RADIX blocks, or (if base_radix != 1)
 *      a single meta node capable of managing HAMMER_ALIST_META_RADIX
 *      blocks which recurses into other storage layers for a total
 *      handling capability of (HAMMER_ALIST_META_RADIX * base_radix) blocks.
 *
 *      Larger a-lists increase their capability exponentially by
 *      HAMMER_ALIST_META_RADIX.
 *
 *      The block count is the total number of blocks inclusive of any
 *      layering.  blocks can be less than base_radix and will result in
 *      a radix tree with a single leaf node which then chains down.
 */

hammer_alist_t
hammer_alist_create(int32_t blocks, int32_t base_radix,
                    struct malloc_type *mtype)
{
        hammer_alist_t live;
        hammer_alist_config_t bl;
        size_t metasize;

        live = kmalloc(sizeof(*live), mtype, M_WAITOK);
        live->config = bl = kmalloc(sizeof(*bl), mtype, M_WAITOK);
        hammer_alist_template(bl, blocks, base_radix, 0);

        metasize = sizeof(*live->meta) * bl->bl_rootblks;
        live->meta = kmalloc(metasize, mtype, M_WAITOK);
        bzero(live->meta, metasize);

#if defined(ALIST_DEBUG)
        kprintf(
                "ALIST representing %d blocks (%d MB of swap)"
                ", requiring %dK (%d bytes) of ram\n",
                bl->bl_blocks,
                bl->bl_blocks * 4 / 1024,
                (bl->bl_rootblks * sizeof(*live->meta) + 1023) / 1024,
                (bl->bl_rootblks * sizeof(*live->meta))
        );
        if (base_radix != 1) {
                kprintf("ALIST recurses when it reaches a base_radix of %d\n",
                        base_radix);
        }
        kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
        hammer_alist_init(live);
        return(live);
}

void
hammer_alist_destroy(hammer_alist_t live, struct malloc_type *mtype)
{
        kfree(live->config, mtype);
        kfree(live->meta, mtype);
        live->config = NULL;
        live->meta = NULL;
        kfree(live, mtype);
}
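
/*
 * Usage sketch (editor's illustration; it mirrors what main() below does).
 * A freshly created a-list starts out fully allocated, so the caller frees
 * the managed range before drawing from it:
 *
 *      hammer_alist_t live;
 *      int32_t blk;
 *
 *      live = hammer_alist_create(1024, 1, NULL);
 *      hammer_alist_free(live, 0, 1024);       -- mark all blocks free
 *      blk = hammer_alist_alloc(live, 16);     -- 16 blocks, 16-aligned
 *      if (blk != HAMMER_ALIST_BLOCK_NONE)
 *              hammer_alist_free(live, blk, 16);
 *      hammer_alist_destroy(live, NULL);
 */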

#endif

/*
 * hammer_alist_alloc()
 *
 *      Reserve space in the block bitmap.  Return the base of a contiguous
 *      region or HAMMER_ALIST_BLOCK_NONE if space could not be allocated.
 */

int32_t
hammer_alist_alloc(hammer_alist_t live, int32_t count)
{
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;
        hammer_alist_config_t bl = live->config;

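        /*
         * count must be a power of 2 (editor's note): e.g. for count == 4,
         * (4 | 3) == 7 == (4 << 1) - 1, while for count == 6,
         * (6 | 5) == 7 but (6 << 1) - 1 == 11, so the assertion fires.
         */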
        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count <= bl->bl_radix) {
                /*
                 * When skip is 1 we are at a leaf.  If we are the terminal
                 * allocator we use our native leaf functions and radix will
                 * be HAMMER_ALIST_BMAP_RADIX.  Otherwise we are a meta node
                 * which will chain to another allocator.
                 */
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_fwd(
                                    live->meta + 1, 0, count);
                } else {
                        blk = hammer_alst_meta_alloc_fwd(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

int32_t
hammer_alist_alloc_rev(hammer_alist_t live, int32_t count)
{
        hammer_alist_config_t bl = live->config;
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;

        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count < bl->bl_radix) {
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_rev(
                                    live->meta + 1, 0, count);
                } else {
                        blk = hammer_alst_meta_alloc_rev(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

#if 0

/*
 * hammer_alist_alloc_from()
 *
 *      An extended version of hammer_alist_alloc() which locates free space
 *      starting at the specified block either forwards or backwards.
 *      HAMMER_ALIST_BLOCK_NONE is returned if space could not be allocated.
 *
 *      Note: when allocating from a particular point forwards space is never
 *      allocated behind that start point, and similarly when going backwards.
 */
int32_t
hammer_alist_alloc_from(hammer_alist_t live, int32_t count, int32_t start)
{
}

#endif

/*
 * hammer_alist_free()
 *
 *      Free up space in the block bitmap.  Panic if an inconsistency is
 *      found.
 *
 *      Unlike allocations, there are no alignment requirements for blkno or
 *      count when freeing blocks.
 */

void
hammer_alist_free(hammer_alist_t live, int32_t blkno, int32_t count)
{
        hammer_alist_config_t bl = live->config;

        KKASSERT(blkno + count <= bl->bl_blocks);
        if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                hammer_alst_leaf_free(live->meta + 1, blkno, count);
        } else {
                hammer_alst_meta_free(live, live->meta + 1,
                                      blkno, count,
                                      bl->bl_radix, bl->bl_skip, 0);
        }
        live->meta->bm_alist_freeblks += count;
}

#ifdef ALIST_DEBUG

/*
 * hammer_alist_print()    - dump radix tree
 */

void
hammer_alist_print(hammer_alist_t live, int tab)
{
        hammer_alist_config_t bl = live->config;

        kprintf("%*.*sALIST (%d free blocks) {\n",
                tab, tab, "", live->meta->bm_alist_freeblks);
        hammer_alst_radix_print(live, live->meta + 1, 0,
                                bl->bl_radix, bl->bl_skip, tab + 4);
        kprintf("%*.*s}\n", tab, tab, "");
}

#endif

/************************************************************************
 *                        ALLOCATION SUPPORT FUNCTIONS                  *
 ************************************************************************
 *
 *      These support functions do all the actual work.  They may seem
 *      rather longish, but that's because I've commented them up.  The
 *      actual code is straightforward.
 *
 */

/*
 * hammer_alst_leaf_alloc_fwd()
 *
 *      Allocate at a leaf in the radix tree (a bitmap).
 *
 *      This is the core of the allocator and is optimized for the 1 block
 *      and the HAMMER_ALIST_BMAP_RADIX block allocation cases.  Other cases
 *      are somewhat slower.  The 1 block allocation case is log2 and
 *      extremely quick.
 */

static int32_t
hammer_alst_leaf_alloc_fwd(hammer_almeta_t scan, int32_t blk, int count)
{
        u_int32_t orig = scan->bm_bitmap;

        /*
         * Optimize the bitmap all-allocated case.  Also, the count = 1
         * case assumes at least 1 bit is free in the bitmap, so
         * we have to take care of this case here.
         */
        if (orig == 0) {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * Optimized code to allocate one bit out of the bitmap
         */
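        /*
         * (Editor's note) This is a binary search for the lowest set
         * (free) bit, halving the search window each pass; e.g. with
         * orig == 0x00010000 it settles on r == 16 and returns blk + 16.
         */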
        if (count == 1) {
                u_int32_t mask;
                int j = HAMMER_ALIST_BMAP_RADIX/2;
                int r = 0;

                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX/2);

                while (j) {
                        if ((orig & mask) == 0) {
                            r += j;
                            orig >>= j;
                        }
                        j >>= 1;
                        mask >>= j;
                }
                scan->bm_bitmap &= ~(1 << r);
                return(blk + r);
        }

        /*
         * Non-optimized code to allocate N bits out of the bitmap.
         * The more bits, the faster the code runs.  It will run
         * the slowest allocating 2 bits, but since there aren't any
         * memory ops in the core loop (or shouldn't be, anyway),
         * you probably won't notice the difference.
         *
         * Similar to the blist case, the alist code also requires
         * allocations to be power-of-2 sized and aligned to the
         * size of the allocation, which simplifies the algorithm.
         */
        {
                int j;
                int n = HAMMER_ALIST_BMAP_RADIX - count;
                u_int32_t mask;

                mask = (u_int32_t)-1 >> n;

                for (j = 0; j <= n; j += count) {
                        if ((orig & mask) == mask) {
                                scan->bm_bitmap &= ~mask;
                                return(blk + j);
                        }
                        mask = mask << count;
                }
        }

        /*
         * We couldn't allocate count in this subtree, update bighint.
         */
        scan->bm_bighint = count - 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * This version allocates blocks in the reverse direction.
 */
static int32_t
hammer_alst_leaf_alloc_rev(hammer_almeta_t scan, int32_t blk, int count)
{
        u_int32_t orig = scan->bm_bitmap;

        /*
         * Optimize the bitmap all-allocated case.  Also, the count = 1
         * case assumes at least 1 bit is free in the bitmap, so
         * we have to take care of this case here.
         */
        if (orig == 0) {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * Optimized code to allocate one bit out of the bitmap
         */
        if (count == 1) {
                u_int32_t mask;
                int j = HAMMER_ALIST_BMAP_RADIX/2;
                int r = HAMMER_ALIST_BMAP_RADIX - 1;

                mask = ~((u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX/2));

                while (j) {
                        if ((orig & mask) == 0) {
                            r -= j;
                            orig <<= j;
                        }
                        j >>= 1;
                        mask <<= j;
                }
                scan->bm_bitmap &= ~(1 << r);
                return(blk + r);
        }

        /*
         * Non-optimized code to allocate N bits out of the bitmap.
         * The more bits, the faster the code runs.  It will run
         * the slowest allocating 2 bits, but since there aren't any
         * memory ops in the core loop (or shouldn't be, anyway),
         * you probably won't notice the difference.
         *
         * Similar to the blist case, the alist code also requires
         * allocations to be power-of-2 sized and aligned to the
         * size of the allocation, which simplifies the algorithm.
         *
         * Initial mask if count == 2:  1100....0000
         */
        {
                int j;
                int n = HAMMER_ALIST_BMAP_RADIX - count;
                u_int32_t mask;

                mask = ((u_int32_t)-1 >> n) << n;

                for (j = n; j >= 0; j -= count) {
                        if ((orig & mask) == mask) {
                                scan->bm_bitmap &= ~mask;
                                return(blk + j);
                        }
                        mask = mask >> count;
                }
        }

        /*
         * We couldn't allocate count in this subtree, update bighint.
         */
        scan->bm_bighint = count - 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * hammer_alst_meta_alloc_fwd()
 *
 *      Allocate at a meta node in the radix tree.
 *
 *      Attempt to allocate at a meta node.  If we can't, we update
 *      bighint and return a failure.  Updating bighint optimizes future
 *      calls that hit this node.  We have to check for our collapse cases
 *      and we have a few optimizations strewn in as well.
 */
static int32_t
hammer_alst_meta_alloc_fwd(hammer_alist_t live, hammer_almeta_t scan,
                           int32_t blk, int32_t count,
                           int32_t radix, int skip
) {
        hammer_alist_config_t bl;
        int i;
        u_int32_t mask;
        u_int32_t pmask;
        int next_skip;

        /*
         * ALL-ALLOCATED special case
         */
        if (scan->bm_bitmap == 0)  {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        /*
         * Radix now represents each bitmap entry for this meta node.  If
         * the number of blocks being allocated can be fully represented,
         * we allocate directly out of this meta node.
         *
         * Meta node bitmaps use 2 bits per block.
         *
         *      00      ALL-ALLOCATED
         *      01      PARTIALLY-FREE/PARTIALLY-ALLOCATED
         *      10      (RESERVED)
         *      11      ALL-FREE
         */
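        /*
         * (Editor's illustration) If radix is now 32 and count is 64,
         * n below is 4 and the initial mask is 0x0000000F: the scan
         * looks for two adjacent all-free (11 11) entries at a time.
         */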
        if (count >= radix) {
                int n = count / radix * 2;      /* number of bits */
                int j;

                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - n);
                for (j = 0; j < HAMMER_ALIST_META_RADIX; j += n / 2) {
                        if ((scan->bm_bitmap & mask) == mask) {
                                scan->bm_bitmap &= ~mask;
                                return(blk + j * radix);
                        }
                        mask <<= n;
                }
                if (scan->bm_bighint >= count)
                        scan->bm_bighint = count >> 1;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * If the count is too big we couldn't allocate anything from a
         * recursion even if the sub-tree were entirely free.
         */
        if (count > radix)
                goto failed;

        /*
         * Otherwise we have to recurse.
         */
        mask = 0x00000003;
        pmask = 0x00000001;

        if (skip == 1) {
                /*
                 * If skip is 1 we are a meta leaf node, which means there
                 * is another allocation layer we have to dive down into.
                 */
                for (i = 0; i < HAMMER_ALIST_META_RADIX; ++i) {
                        /*
                         * If the next layer is completely free then call
                         * its init function to initialize it and mark it
                         * partially free.
                         *
                         * bl_radix_init returns an error code or 0 on
                         * success.
                         */
                        if ((scan->bm_bitmap & mask) == mask) {
                                int32_t v;

                                v = bl->bl_blocks - blk;
                                if (v > radix)
                                        v = radix;
                                if (bl->bl_radix_init(live->info, blk, radix, v) == 0) {
                                        scan->bm_bitmap &= ~mask;
                                        scan->bm_bitmap |= pmask;
                                }
                        }

                        /*
                         * If there may be some free blocks try to allocate
                         * out of the layer.  If the layer indicates that
                         * it is completely full then clear both bits to
                         * propagate the condition.
                         */
                        if ((scan->bm_bitmap & mask) == pmask) {
                                int32_t r;
                                int32_t full;

                                r = bl->bl_radix_alloc_fwd(live->info, blk,
                                                           radix, count, &full);
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (full)
                                                scan->bm_bitmap &= ~mask;
                                        return(r);
                                }
                        }
                        blk += radix;
                        mask <<= 2;
                        pmask <<= 2;
                }
        } else {
                /*
                 * Otherwise there are sub-records in the current a-list
                 * layer.  We can also peek into the sub-layers to get
                 * more accurate size hints.
                 */
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
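                /*
                 * (Editor's illustration) With HAMMER_ALIST_META_RADIX
                 * == 16, skip == 273 gives next_skip == 17: the
                 * sub-records visited below sit at scan[1], scan[18],
                 * scan[35], and so on.
                 */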
                for (i = 1; i <= skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                /*
                                 * Terminator
                                 */
                                break;
                        }
                        if ((scan->bm_bitmap & mask) == mask) {
                                scan[i].bm_bitmap = (u_int32_t)-1;
                                scan[i].bm_bighint = radix;
                        }

                        if (count <= scan[i].bm_bighint) {
                                /*
                                 * count fits in object, recurse into the
                                 * next layer.  If the next_skip is 1 it
                                 * will be either a normal leaf or a meta
                                 * leaf.
                                 */
                                int32_t r;

                                if (next_skip == 1 && bl->bl_terminal) {
                                        r = hammer_alst_leaf_alloc_fwd(
                                                &scan[i], blk, count);
                                } else {
                                        r = hammer_alst_meta_alloc_fwd(
                                                live, &scan[i],
                                                blk, count,
                                                radix, next_skip);
                                }
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (scan[i].bm_bitmap == 0) {
                                                scan->bm_bitmap &= ~mask;
                                        } else {
                                                scan->bm_bitmap &= ~mask;
                                                scan->bm_bitmap |= pmask;
                                        }
                                        return(r);
                                }
                        }
                        blk += radix;
                        mask <<= 2;
                        pmask <<= 2;
                }
        }

failed:
        /*
         * We couldn't allocate count in this subtree, update bighint.
         */
        if (scan->bm_bighint >= count)
                scan->bm_bighint = count >> 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * This version allocates blocks in the reverse direction.
 */
static int32_t
hammer_alst_meta_alloc_rev(hammer_alist_t live, hammer_almeta_t scan,
                           int32_t blk, int32_t count,
                           int32_t radix, int skip
) {
        hammer_alist_config_t bl;
        int i;
        int j;
        u_int32_t mask;
        u_int32_t pmask;
        int next_skip;

        /*
         * ALL-ALLOCATED special case
         */
        if (scan->bm_bitmap == 0)  {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        /*
         * Radix now represents each bitmap entry for this meta node.  If
         * the number of blocks being allocated can be fully represented,
         * we allocate directly out of this meta node.
         *
         * Meta node bitmaps use 2 bits per block.
         *
         *      00      ALL-ALLOCATED
         *      01      PARTIALLY-FREE/PARTIALLY-ALLOCATED
         *      10      (RESERVED)
         *      11      ALL-FREE
         */
        if (count >= radix) {
                int n = count / radix * 2;      /* number of bits */
                int j;

                /*
                 * Initial mask if e.g. n == 2:  1100....0000
                 */
                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - n) <<
                        (HAMMER_ALIST_BMAP_RADIX - n);
                for (j = HAMMER_ALIST_META_RADIX - n / 2; j >= 0; j -= n / 2) {
                        if ((scan->bm_bitmap & mask) == mask) {
                                scan->bm_bitmap &= ~mask;
                                return(blk + j * radix);
                        }
                        mask >>= n;
                }
                if (scan->bm_bighint >= count)
                        scan->bm_bighint = count >> 1;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * If the count is too big we couldn't allocate anything from a
         * recursion even if the sub-tree were entirely free.
         */
        if (count > radix)
                goto failed;

        if (skip == 1) {
                /*
                 * We need to recurse, but we are at a meta node leaf, which
                 * means there is another layer under us.
                 */
                mask = 0xC0000000;
                pmask = 0x40000000;
                blk += radix * HAMMER_ALIST_META_RADIX - radix;

                for (i = 0; i < HAMMER_ALIST_META_RADIX; ++i) {
                        /*
                         * If the next layer is completely free then call
                         * its init function to initialize it and mark it
                         * partially free.
                         */
                        if ((scan->bm_bitmap & mask) == mask) {
                                int32_t v;

                                v = bl->bl_blocks - blk;
                                if (v > radix)
                                        v = radix;
                                if (bl->bl_radix_init(live->info, blk, radix, v) == 0) {
                                        scan->bm_bitmap &= ~mask;
                                        scan->bm_bitmap |= pmask;
                                }
                        }

                        /*
                         * If there may be some free blocks try to allocate
                         * out of the layer.  If the layer indicates that
                         * it is completely full then clear both bits to
                         * propagate the condition.
                         */
                        if ((scan->bm_bitmap & mask) == pmask) {
                                int32_t r;
                                int32_t full;

                                r = bl->bl_radix_alloc_rev(live->info, blk,
                                                           radix, count, &full);
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (full)
                                                scan->bm_bitmap &= ~mask;
                                        return(r);
                                }
                        }
                        mask >>= 2;
                        pmask >>= 2;
                        blk -= radix;
                }
        } else {
                /*
                 * Since we are going in the reverse direction we need an
                 * extra loop to determine if there is a terminator, then
                 * run backwards.
                 *
                 * This is a little weird but we do not want to overflow the
                 * mask/pmask in the loop.
                 */
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                j = 0;
                for (i = 1; i <= skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1)
                                break;
                        blk += radix;
                        j += 2;
                }
                blk -= radix;
                j -= 2;
                mask = 0x00000003 << j;
                pmask = 0x00000001 << j;
                i -= next_skip;

                while (i >= 1) {
                        /*
                         * Initialize the bitmap in the child if we are
                         * allocating from the all-free case.
                         */
                        if ((scan->bm_bitmap & mask) == mask) {
                                scan[i].bm_bitmap = (u_int32_t)-1;
                                scan[i].bm_bighint = radix;
                        }

                        /*
                         * Handle various count cases.  Bighint may be too
                         * large but is never too small.
                         */
                        if (count <= scan[i].bm_bighint) {
                                /*
                                 * count fits in object
                                 */
                                int32_t r;
                                if (next_skip == 1 && bl->bl_terminal) {
                                        r = hammer_alst_leaf_alloc_rev(
                                                &scan[i], blk, count);
                                } else {
                                        r = hammer_alst_meta_alloc_rev(
                                                live, &scan[i],
                                                blk, count,
                                                radix, next_skip);
                                }
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (scan[i].bm_bitmap == 0) {
                                                scan->bm_bitmap &= ~mask;
                                        } else {
                                                scan->bm_bitmap &= ~mask;
                                                scan->bm_bitmap |= pmask;
                                        }
                                        return(r);
                                }
                        } else if (count > radix) {
                                /*
                                 * count does not fit in object even if it were
                                 * completely free.
                                 */
                                break;
                        }
                        blk -= radix;
                        mask >>= 2;
                        pmask >>= 2;
                        i -= next_skip;
                }
        }

failed:
        /*
         * We couldn't allocate count in this subtree, update bighint.
         * Since we are restricted to powers of 2, the next highest count
         * we might be able to allocate is (count >> 1).
         */
        if (scan->bm_bighint >= count)
                scan->bm_bighint = count >> 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * hammer_alst_leaf_free()
 *
 *      Free allocated blocks from a leaf bitmap.  The allocation code is
 *      restricted to powers of 2, the freeing code is not.
 */
static void
hammer_alst_leaf_free(hammer_almeta_t scan, int32_t blk, int count) {
        /*
         * free some data in this bitmap
         *
         * e.g.
         *      0000111111111110000
         *          \_________/\__/
         *              v        n
         */
        int n = blk & (HAMMER_ALIST_BMAP_RADIX - 1);
        u_int32_t mask;

        mask = ((u_int32_t)-1 << n) &
            ((u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - count - n));
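
        /*
         * (Editor's illustration) For blk == 4 and count == 3 in a 32-bit
         * leaf, n == 4 and mask == 0x00000070, i.e. bits 4-6 become free.
         */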

        if (scan->bm_bitmap & mask)
                panic("hammer_alst_leaf_free: freeing free block");
        scan->bm_bitmap |= mask;

        /*
         * We could probably do a better job here.  We are required to make
         * bighint at least as large as the biggest contiguous block of
         * data.  If we just shoehorn it, a little extra overhead will
         * be incurred on the next allocation (but only that one typically).
         */
        scan->bm_bighint = HAMMER_ALIST_BMAP_RADIX;
}

/*
 * hammer_alst_meta_free()
 *
 *      Free allocated blocks from radix tree meta info.
 *
 *      This support routine frees a range of blocks from the bitmap.
 *      The range must be entirely enclosed by this radix node.  If a
 *      meta node, we break the range down recursively to free blocks
 *      in subnodes (which means that this code can free an arbitrary
 *      range whereas the allocation code cannot allocate an arbitrary
 *      range).
 */

static void
hammer_alst_meta_free(hammer_alist_t live, hammer_almeta_t scan,
                      int32_t freeBlk, int32_t count,
                      int32_t radix, int skip, int32_t blk
) {
        hammer_alist_config_t bl;
        int next_skip;
        u_int32_t mask;
        u_int32_t pmask;
        int i;

        /*
         * Break the free down into its components.  Because it is so easy
         * to implement, frees are not limited to power-of-2 sizes.
         *
         * Each block in a meta-node bitmap takes two bits.
         */
        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        i = (freeBlk - blk) / radix;
        blk += i * radix;
        mask = 0x00000003 << (i * 2);
        pmask = 0x00000001 << (i * 2);

        if (skip == 1) {
                /*
                 * Our meta node is a leaf node, which means it must recurse
                 * into another allocator.
                 */
                while (i < HAMMER_ALIST_META_RADIX && blk < freeBlk + count) {
                        int32_t v;

                        v = blk + radix - freeBlk;
                        if (v > count)
                                v = count;

                        if (scan->bm_bighint == (int32_t)-1)
                                panic("hammer_alst_meta_free: freeing unexpected range");

                        if (freeBlk == blk && count >= radix) {
                                /*
                                 * All-free case, no need to update sub-tree
                                 */
                                scan->bm_bitmap |= mask;
                                scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                /* XXX bighint not being set properly */
                        } else {
                                /*
                                 * Recursion case
                                 */
                                int32_t empty;

                                bl->bl_radix_free(live->info, freeBlk, v,
                                                  radix, blk, &empty);
                                if (empty) {
                                        scan->bm_bitmap |= mask;
                                        scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                        /* XXX bighint not being set properly */
                                } else {
                                        scan->bm_bitmap |= pmask;
                                        if (scan->bm_bighint < radix / 2)
                                                scan->bm_bighint = radix / 2;
                                        /* XXX bighint not being set properly */
                                }
                        }
                        ++i;
                        mask <<= 2;
                        pmask <<= 2;
                        count -= v;
                        freeBlk += v;
                        blk += radix;
                }
        } else {
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                i = 1 + i * next_skip;

                while (i <= skip && blk < freeBlk + count) {
                        int32_t v;

                        v = blk + radix - freeBlk;
                        if (v > count)
                                v = count;

                        if (scan->bm_bighint == (int32_t)-1)
                                panic("hammer_alst_meta_free: freeing unexpected range");

                        if (freeBlk == blk && count >= radix) {
                                /*
                                 * All-free case, no need to update sub-tree
                                 */
                                scan->bm_bitmap |= mask;
                                scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                /* XXX bighint not being set properly */
                        } else {
                                /*
                                 * Recursion case
                                 */
                                if (next_skip == 1 && bl->bl_terminal) {
                                        hammer_alst_leaf_free(&scan[i], freeBlk, v);
                                } else {
                                        hammer_alst_meta_free(live, &scan[i],
                                                              freeBlk, v,
                                                              radix, next_skip,
                                                              blk);
                                }
                                if (scan[i].bm_bitmap == (u_int32_t)-1)
                                        scan->bm_bitmap |= mask;
                                else
                                        scan->bm_bitmap |= pmask;
                                if (scan->bm_bighint < scan[i].bm_bighint)
                                        scan->bm_bighint = scan[i].bm_bighint;
                        }
                        mask <<= 2;
                        pmask <<= 2;
                        count -= v;
                        freeBlk += v;
                        blk += radix;
                        i += next_skip;
                }
        }
}

/*
 * hammer_alst_radix_init()
 *
 *      Initialize our meta structures and bitmaps and calculate the exact
 *      number of meta-nodes required to manage 'count' blocks.
 *
 *      The required space may be truncated due to early termination records.
 */
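/*
 * (Editor's illustration) For radix == 512, skip == 17 and count == 512
 * (the blocks == 512, base_radix == 1 template above), this returns 18
 * records: one meta root, sixteen leaves, and one terminator slot; the
 * template then adds one more record for the freeblks header.
 */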
static int32_t
hammer_alst_radix_init(hammer_almeta_t scan, int32_t radix,
                       int skip, int32_t count)
{
        int i;
        int next_skip;
        int32_t memindex = 1;
        u_int32_t mask;
        u_int32_t pmask;

        /*
         * Basic initialization of the almeta for meta or leaf nodes
         */
        if (scan) {
                scan->bm_bighint = 0;
                scan->bm_bitmap = 0;
        }

        /*
         * We are at a leaf; it eats exactly one meta element.
         */
        if (skip == 1)
                return(memindex);

        /*
         * Meta node.  If allocating the entire object we can special
         * case it.  However, we need to figure out how much memory
         * is required to manage 'count' blocks, so we continue on anyway.
         */
        radix /= HAMMER_ALIST_META_RADIX;
        next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
        mask = 0x00000003;
        pmask = 0x00000001;

        for (i = 1; i <= skip; i += next_skip) {
                /*
                 * We eat up to this record
                 */
                memindex = i;

                if (count >= radix) {
                        /*
                         * Allocate the entire object
                         */
                        memindex += hammer_alst_radix_init(
                            ((scan) ? &scan[i] : NULL),
                            radix,
                            next_skip,
                            radix
                        );
                        count -= radix;
                        /* already marked as wholly allocated */
                } else if (count > 0) {
                        /*
                         * Allocate a partial object
                         */
                        memindex += hammer_alst_radix_init(
                            ((scan) ? &scan[i] : NULL),
                            radix,
                            next_skip,
                            count
                        );
                        count = 0;

                        /*
                         * Mark as partially allocated
                         */
                        if (scan)
                                scan->bm_bitmap |= pmask;
                } else {
                        /*
                         * Add a terminator and break out.  The terminator
                         * eats the meta node at scan[i].
                         */
                        ++memindex;
                        if (scan)
                                scan[i].bm_bighint = (int32_t)-1;
                        /* already marked as wholly allocated */
                        break;
                }
                mask <<= 2;
                pmask <<= 2;
        }
        return(memindex);
}

#ifdef ALIST_DEBUG

static void
hammer_alst_radix_print(hammer_alist_t live, hammer_almeta_t scan,
                        int32_t blk, int32_t radix, int skip, int tab)
{
        int i;
        int next_skip;
        int lastState = 0;
        u_int32_t mask;

        if (skip == 1 && live->config->bl_terminal) {
                kprintf(
                    "%*.*s(%04x,%d): bitmap %08x big=%d\n",
                    tab, tab, "",
                    blk, radix,
                    scan->bm_bitmap,
                    scan->bm_bighint
                );
                return;
        }

        if (scan->bm_bitmap == 0) {
                kprintf(
                    "%*.*s(%04x,%d) ALL ALLOCATED\n",
                    tab, tab, "",
                    blk,
                    radix
                );
                return;
        }
        if (scan->bm_bitmap == (u_int32_t)-1) {
                kprintf(
                    "%*.*s(%04x,%d) ALL FREE\n",
                    tab, tab, "",
                    blk,
                    radix
                );
                return;
        }

        kprintf(
            "%*.*s(%04x,%d): %s (%d) bitmap=%08x big=%d {\n",
            tab, tab, "",
            blk, radix,
            (skip == 1 ? "LAYER" : "subtree"),
            radix,
            scan->bm_bitmap,
            scan->bm_bighint
        );

        radix /= HAMMER_ALIST_META_RADIX;
        tab += 4;
        mask = 0x00000003;

        if (skip == 1) {
                for (i = 0; i < HAMMER_ALIST_META_RADIX; ++i) {
                        if ((scan->bm_bitmap & mask) == mask) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL FREE\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else if ((scan->bm_bitmap & mask) == 0) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL ALLOCATED\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else {
                                live->config->bl_radix_print(
                                                live->info, blk, radix, tab);
                        }
                        blk += radix;
                        mask <<= 2;
                }
        } else {
                next_skip = ((u_int)skip / HAMMER_ALIST_META_RADIX);

                for (i = 1; i <= skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                kprintf(
                                    "%*.*s(%04x,%d): Terminator\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                                lastState = 0;
                                break;
                        }
                        if ((scan->bm_bitmap & mask) == mask) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL FREE\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else if ((scan->bm_bitmap & mask) == 0) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL ALLOCATED\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else {
                                hammer_alst_radix_print(
                                    live,
                                    &scan[i],
                                    blk,
                                    radix,
                                    next_skip - 1,
                                    tab
                                );
                        }
                        blk += radix;
                        mask <<= 2;
                }
        }
        tab -= 4;

        kprintf(
            "%*.*s}\n",
            tab, tab, ""
        );
}

#endif

#ifdef ALIST_DEBUG

static struct hammer_alist_live **layers;       /* initialized by main */
static int32_t layer_radix = -1;

static
int
debug_radix_init(void *info, int32_t blk, int32_t radix, int32_t count)
{
        hammer_alist_t layer;
        int layer_no = blk / layer_radix;

        printf("lower layer: init (%04x,%d) for %d blks\n", blk, radix, count);
        KKASSERT(layers[layer_no] == NULL);
        layer = layers[layer_no] = hammer_alist_create(count, 1, NULL);
        hammer_alist_free(layer, 0, count);
        return(0);
}

static
int32_t
debug_radix_alloc_fwd(void *info, int32_t blk, int32_t radix,
                      int32_t count, int32_t *fullp)
{
        hammer_alist_t layer = layers[blk / layer_radix];
        int32_t r;

        /*
         * The sub-layer allocates relative to its own base, so translate
         * the result back into this layer's block space, and report
         * whether the sub-layer is now full (*fullp was previously left
         * uninitialized here even though the meta code reads it).
         */
        r = hammer_alist_alloc(layer, count);
        *fullp = (layer->meta->bm_alist_freeblks == 0);
        if (r != HAMMER_ALIST_BLOCK_NONE)
                r += blk;
        return(r);
}

static
int32_t
debug_radix_alloc_rev(void *info, int32_t blk, int32_t radix,
                      int32_t count, int32_t *fullp)
{
        int layer_no = blk / layer_radix;
        hammer_alist_t layer = layers[layer_no];
        int32_t r;

        /*
         * Compute the layer index up front rather than after reassigning
         * blk (the old code indexed layers[] with the layer-relative
         * result), and translate the relative result back into this
         * layer's block space as in the forward case.
         */
        r = hammer_alist_alloc_rev(layer, count);
        *fullp = (layer->meta->bm_alist_freeblks == 0);
        if (*fullp) {
                hammer_alist_destroy(layer, NULL);
                layers[layer_no] = NULL;
        }
        if (r != HAMMER_ALIST_BLOCK_NONE)
                r += blk;
        return(r);
}

static
void
debug_radix_free(void *info, int32_t freeBlk, int32_t count,
                 int32_t radix, int32_t blk, int32_t *emptyp)
{
        int layer_no = blk / layer_radix;
        hammer_alist_t layer = layers[layer_no];

        if (layer == NULL) {
                /*
                 * XXX layer_radix isn't correct if the total number
                 * of blocks only partially fills this layer.  Don't
                 * worry about it.
                 */
                layer = hammer_alist_create(layer_radix, 1, NULL);
                layers[layer_no] = layer;
        }
        hammer_alist_free(layer, freeBlk - blk, count);
        *emptyp = (layer->meta->bm_alist_freeblks == layer_radix);
}

static
void
debug_radix_print(void *info, int32_t blk, int32_t radix, int tab)
{
        hammer_alist_t layer = layers[blk / layer_radix];

        hammer_alist_print(layer, tab);
}

int
main(int ac, char **av)
{
        int32_t size = -1;
        int i;
        hammer_alist_t live;
        hammer_almeta_t meta = NULL;

        for (i = 1; i < ac; ++i) {
                const char *ptr = av[i];
                if (*ptr != '-') {
                        if (size == -1)
                                size = strtol(ptr, NULL, 0);
                        else if (layer_radix == -1)
                                layer_radix = strtol(ptr, NULL, 0);
                        else
                                ;
                        continue;
                }
                ptr += 2;
                fprintf(stderr, "Bad option: %s\n", ptr - 2);
                exit(1);
        }
        if (size == -1)
                size = 1024;
        if (layer_radix == -1)
                layer_radix = 1;        /* no second storage layer */
        if ((size ^ (size - 1)) != (size << 1) - 1) {
                fprintf(stderr, "size must be a power of 2\n");
                exit(1);
        }
        if ((layer_radix ^ (layer_radix - 1)) != (layer_radix << 1) - 1) {
                fprintf(stderr, "the second layer radix must be a power of 2\n");
                exit(1);
        }

        live = hammer_alist_create(size, layer_radix, NULL);
        layers = calloc(size, sizeof(hammer_alist_t));

        printf("A-LIST TEST %d blocks, first-layer radix %d, "
               "second-layer radix %d\n",
                size, live->config->bl_radix / layer_radix, layer_radix);

        live->config->bl_radix_init = debug_radix_init;
        live->config->bl_radix_alloc_fwd = debug_radix_alloc_fwd;
        live->config->bl_radix_alloc_rev = debug_radix_alloc_rev;
        live->config->bl_radix_free = debug_radix_free;
        live->config->bl_radix_print = debug_radix_print;

        hammer_alist_free(live, 0, size);
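
        /*
         * Interactive shell.  An illustrative session (editor's note),
         * starting with everything free:
         *
         *      1024/1024> a 16
         *          R=0000
         *      1008/1024> f 0 16
         *      1024/1024> p
         */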

        for (;;) {
                char buf[1024];
                int32_t da = 0;
                int32_t count = 0;
                int32_t blk;

                kprintf("%d/%d> ",
                        live->meta->bm_alist_freeblks, size);
                fflush(stdout);
                if (fgets(buf, sizeof(buf), stdin) == NULL)
                        break;
                switch(buf[0]) {
                case 'p':
                        hammer_alist_print(live, 0);
                        break;
                case 'a':
                        if (sscanf(buf + 1, "%d", &count) == 1) {
                                blk = hammer_alist_alloc(live, count);
                                kprintf("    R=%04x\n", blk);
                        } else {
                                kprintf("?\n");
                        }
                        break;
                case 'r':
                        if (sscanf(buf + 1, "%d", &count) == 1) {
                                blk = hammer_alist_alloc_rev(live, count);
                                kprintf("    R=%04x\n", blk);
                        } else {
                                kprintf("?\n");
                        }
                        break;
                case 'f':
                        if (sscanf(buf + 1, "%x %d", &da, &count) == 2) {
                                hammer_alist_free(live, da, count);
                        } else {
                                kprintf("?\n");
                        }
                        break;
                case '?':
                case 'h':
                        puts(
                            "p          -print\n"
                            "a %d       -allocate\n"
                            "r %d       -allocate reverse\n"
                            "f %x %d    -free\n"
                            "h/?        -help"
                        );
                        break;
                default:
                        kprintf("?\n");
                        break;
                }
        }
        return(0);
}

void
panic(const char *ctl, ...)
{
        __va_list va;

        __va_start(va, ctl);
        vfprintf(stderr, ctl, va);
        fprintf(stderr, "\n");
        __va_end(va);
        exit(1);
}

#endif