/*
 * HAMMER_ALIST.C -
 *
 * Bitmap allocator/deallocator, using a radix tree with hinting.
 * Unlimited-size allocations, power-of-2 only, power-of-2 aligned results
 * only.  This module has been separated from the generic kernel module and
 * written specifically for embedding in HAMMER storage structures.
 *
 * Copyright (c) 2007 The DragonFly Project.  All rights reserved.
 *
 * This code is derived from software contributed to The DragonFly Project
 * by Matthew Dillon <dillon@backplane.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name of The DragonFly Project nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific, prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $DragonFly: src/sys/vfs/hammer/Attic/hammer_alist.c,v 1.4 2007/11/01 20:53:05 dillon Exp $
 */
/*
 * This module implements a generic allocator through the use of a hinted
 * radix tree.  All allocations must be in powers of 2 and will return
 * similarly aligned results.  The radix tree typically recurses within
 * a memory buffer and then continues its recursion by chaining to other
 * memory buffers, creating a seamless whole capable of managing any amount
 * of storage.
 *
 * The radix tree is laid out recursively using a linear array.  Each meta
 * node is immediately followed (laid out sequentially in memory) by
 * HAMMER_ALIST_META_RADIX lower level nodes.  This is a recursive structure
 * but one that can be easily scanned through a very simple 'skip'
 * calculation.
 *
 * The radix tree supports an early-termination optimization which
 * effectively allows us to efficiently mix large and small allocations
 * with a single abstraction.  The address space can be partitioned
 * arbitrarily without adding much in the way of additional meta-storage
 * for the allocator.
 *
 * The radix tree supports allocator layering.  By supplying a base_radix > 1
 * the allocator will issue callbacks to recurse into lower layers once
 * higher layers have been exhausted.  Allocations in multiples of base_radix
 * will be entirely retained in the higher level allocator and never recurse.
 *
 * This code can be compiled stand-alone for debugging.
 */
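
/*
 * A concrete sketch of the layout, assuming HAMMER_ALIST_META_RADIX == 16:
 * a two-level tree has skip == 17 and is laid out linearly as scan[0]
 * (the root meta node) followed by its children at scan[1]..scan[16].
 * A three-level tree has skip == 16 * 17 + 1 == 273 and next_skip == 17,
 * so the root's children sit at scan[1], scan[18], scan[35], ..., each
 * immediately followed by its own 16 children.
 */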

#ifdef _KERNEL

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/lock.h>
#include <sys/kernel.h>
#include <sys/malloc.h>
#include <vm/vm.h>
#include <vm/vm_object.h>
#include <vm/vm_kern.h>
#include <vm/vm_extern.h>
#include <vm/vm_page.h>

#include "hammer_alist.h"
#include "hammer_disk.h"

#else

#ifndef ALIST_NO_DEBUG
#define ALIST_DEBUG
#endif

#include <sys/types.h>
#include <stdio.h>
#include <assert.h>
#include <string.h>
#include <stdlib.h>
#include <stdarg.h>

#define kmalloc(a,b,c)  malloc(a)
#define kfree(a,b)      free(a)
#define kprintf         printf
#define KKASSERT(exp)   assert(exp)
struct malloc_type;

#include "hammer_alist.h"

void panic(const char *ctl, ...);

#endif

/*
 * static support functions
 */

static int32_t hammer_alst_leaf_alloc_fwd(hammer_almeta_t scan,
                                        int32_t blk, int count, int32_t atblk);
static int32_t hammer_alst_meta_alloc_fwd(hammer_alist_t live,
                                        hammer_almeta_t scan,
                                        int32_t blk, int32_t count,
                                        int32_t radix, int skip, int32_t atblk);
static int32_t hammer_alst_leaf_alloc_rev(hammer_almeta_t scan,
                                        int32_t blk, int count, int32_t atblk);
static int32_t hammer_alst_meta_alloc_rev(hammer_alist_t live,
                                        hammer_almeta_t scan,
                                        int32_t blk, int32_t count,
                                        int32_t radix, int skip, int32_t atblk);
static void hammer_alst_leaf_free(hammer_almeta_t scan,
                                        int32_t relblk, int count);
static void hammer_alst_meta_free(hammer_alist_t live, hammer_almeta_t scan,
                                        int32_t freeBlk, int32_t count,
                                        int32_t radix, int skip, int32_t blk);
static int32_t  hammer_alst_radix_init(hammer_almeta_t scan,
                                        int32_t radix, int skip, int32_t count);
#ifdef ALIST_DEBUG
static void     hammer_alst_radix_print(hammer_alist_t live,
                                        hammer_almeta_t scan, int32_t blk,
                                        int32_t radix, int skip, int tab);
#endif

/*
 * Initialize an a-list config structure for use.  The config structure
 * describes the basic structure of an a-list's topology and may be
 * shared by any number of a-lists which have the same topology.
 *
 * blocks is the total number of blocks, i.e. the number of blocks
 * handled at this layer multiplied by the base radix.
 *
 * When base_radix != 1 the A-list has only meta elements and does not have
 * any leaves, in order to be able to track partial allocations.
 */
void
hammer_alist_template(hammer_alist_config_t bl, int32_t blocks,
                      int32_t base_radix, int32_t maxmeta)
{
        int radix;
        int skip;

        /*
         * Calculate radix and skip field used for scanning.  The leaf nodes
         * in our tree are either BMAP or META nodes depending on whether
         * we chain to a lower level allocation layer or not.
         */
        if (base_radix == 1)
                radix = HAMMER_ALIST_BMAP_RADIX;
        else
                radix = HAMMER_ALIST_META_RADIX;
        skip = 1;

        while (radix < blocks / base_radix) {
                radix *= HAMMER_ALIST_META_RADIX;
                skip = skip * HAMMER_ALIST_META_RADIX + 1;
        }

        /*
         * Increase the radix based on the number of blocks a lower level
         * allocator is able to handle at the 'base' of our allocator.
         * If base_radix != 1 the caller will have to initialize the callback
         * fields to implement the lower level allocator.
         */
        radix *= base_radix;

        bzero(bl, sizeof(*bl));

        bl->bl_blocks = blocks;
        bl->bl_base_radix = base_radix;
        bl->bl_radix = radix;
        bl->bl_skip = skip;
        bl->bl_rootblks = hammer_alst_radix_init(NULL, bl->bl_radix,
                                                 bl->bl_skip, blocks);
        ++bl->bl_rootblks;      /* one more for freeblks header */
        if (base_radix == 1)
                bl->bl_terminal = 1;
        KKASSERT(maxmeta == 0 || bl->bl_rootblks <= maxmeta);

#if defined(ALIST_DEBUG)
        kprintf(
                "PRIMARY ALIST LAYER manages %d blocks"
                ", requiring %dK (%d bytes) of ram\n",
                bl->bl_blocks / bl->bl_base_radix,
                (bl->bl_rootblks * sizeof(struct hammer_almeta) + 1023) / 1024,
                (bl->bl_rootblks * sizeof(struct hammer_almeta))
        );
        kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
}
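
/*
 * A worked example of the template computation, assuming
 * HAMMER_ALIST_BMAP_RADIX == 32 and HAMMER_ALIST_META_RADIX == 16:
 * for blocks == 512 and base_radix == 1 we start with radix = 32 and
 * skip = 1.  One pass through the loop (32 < 512) yields radix = 512
 * and skip = 17, and the loop terminates.  The resulting a-list is a
 * root meta node whose 16 children are terminal bitmap leaves of 32
 * blocks each, and bl_terminal is set.
 */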

void
hammer_alist_init(hammer_alist_t live)
{
        hammer_alist_config_t bl = live->config;

        live->meta->bm_bighint = 0;
        live->meta->bm_alist_freeblks = 0;
        hammer_alst_radix_init(live->meta + 1, bl->bl_radix,
                               bl->bl_skip, bl->bl_blocks);
}

#if !defined(_KERNEL) && defined(ALIST_DEBUG)

/*
 * hammer_alist_create()        (userland only)
 *
 *      Create an alist capable of handling up to the specified number of
 *      blocks.  blocks must be greater than 0.
 *
 *      The smallest alist consists of a single leaf node capable of
 *      managing HAMMER_ALIST_BMAP_RADIX blocks, or (if base_radix != 1)
 *      a single meta node capable of managing HAMMER_ALIST_META_RADIX
 *      blocks which recurses into other storage layers for a total
 *      handling capability of (HAMMER_ALIST_META_RADIX * base_radix) blocks.
 *
 *      Larger a-lists increase their capability exponentially by
 *      HAMMER_ALIST_META_RADIX.
 *
 *      The block count is the total number of blocks inclusive of any
 *      layering.  blocks can be less than base_radix and will result in
 *      a radix tree with a single leaf node which then chains down.
 */

hammer_alist_t
hammer_alist_create(int32_t blocks, int32_t base_radix,
                    struct malloc_type *mtype)
{
        hammer_alist_t live;
        hammer_alist_config_t bl;
        size_t metasize;

        live = kmalloc(sizeof(*live), mtype, M_WAITOK);
        live->config = bl = kmalloc(sizeof(*bl), mtype, M_WAITOK);
        hammer_alist_template(bl, blocks, base_radix, 0);

        metasize = sizeof(*live->meta) * bl->bl_rootblks;
        live->meta = kmalloc(metasize, mtype, M_WAITOK);
        bzero(live->meta, metasize);

#if defined(ALIST_DEBUG)
        kprintf(
                "ALIST representing %d blocks (%d MB of swap)"
                ", requiring %dK (%d bytes) of ram\n",
                bl->bl_blocks,
                bl->bl_blocks * 4 / 1024,
                (bl->bl_rootblks * sizeof(*live->meta) + 1023) / 1024,
                (bl->bl_rootblks * sizeof(*live->meta))
        );
        if (base_radix != 1) {
                kprintf("ALIST recurses when it reaches a base_radix of %d\n",
                        base_radix);
        }
        kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
        hammer_alist_init(live);
        return(live);
}

void
hammer_alist_destroy(hammer_alist_t live, struct malloc_type *mtype)
{
        kfree(live->config, mtype);
        kfree(live->meta, mtype);
        live->config = NULL;
        live->meta = NULL;
        kfree(live, mtype);
}

#endif
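
/*
 * A minimal usage sketch (illustrative only, hence the #if 0; assumes
 * the userland debug build above with base_radix == 1):
 */
#if 0
static void
hammer_alist_example(void)
{
        hammer_alist_t live;
        int32_t blk;

        live = hammer_alist_create(1024, 1, NULL);      /* 1024 blocks */
        blk = hammer_alist_alloc(live, 16);             /* power-of-2 only */
        if (blk != HAMMER_ALIST_BLOCK_NONE)
                hammer_alist_free(live, blk, 16);
        hammer_alist_destroy(live, NULL);
}
#endif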

/*
 * hammer_alist_alloc()
 *
 *      Reserve space in the block bitmap.  Return the base of a contiguous
 *      region or HAMMER_ALIST_BLOCK_NONE if space could not be allocated.
 */

int32_t
hammer_alist_alloc(hammer_alist_t live, int32_t count)
{
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;
        hammer_alist_config_t bl = live->config;
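
        /*
         * The KKASSERT below verifies that count is a power of 2: when a
         * single bit is set, count | (count - 1) fills in every bit below
         * it, yielding (count << 1) - 1.  e.g. count = 8: 8|7 == 15 ==
         * (8<<1)-1, while a non-power such as 6 fails: 6|5 == 7 != 11.
         */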
        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count <= bl->bl_radix) {
                /*
                 * When skip is 1 we are at a leaf.  If we are the terminal
                 * allocator we use our native leaf functions and radix will
                 * be HAMMER_ALIST_BMAP_RADIX.  Otherwise we are a meta node
                 * which will chain to another allocator.
                 */
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_fwd(
                                    live->meta + 1, 0, count, 0);
                } else {
                        blk = hammer_alst_meta_alloc_fwd(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip, 0);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

int32_t
hammer_alist_alloc_fwd(hammer_alist_t live, int32_t count, int32_t atblk)
{
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;
        hammer_alist_config_t bl = live->config;

        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count <= bl->bl_radix) {
                /*
                 * When skip is 1 we are at a leaf.  If we are the terminal
                 * allocator we use our native leaf functions and radix will
                 * be HAMMER_ALIST_BMAP_RADIX.  Otherwise we are a meta node
                 * which will chain to another allocator.
                 */
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_fwd(
                                    live->meta + 1, 0, count, atblk);
                } else {
                        blk = hammer_alst_meta_alloc_fwd(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip, atblk);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

int32_t
hammer_alist_alloc_rev(hammer_alist_t live, int32_t count, int32_t atblk)
{
        hammer_alist_config_t bl = live->config;
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;

        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count < bl->bl_radix) {
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_rev(
                                    live->meta + 1, 0, count, atblk);
                } else {
                        blk = hammer_alst_meta_alloc_rev(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip, atblk);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

/*
 * alist_free()
 *
 *      Free up space in the block bitmap.  Return the base of a contiguous
 *      region.  Panic if an inconsistency is found.
 *
 *      Unlike allocations, there are no alignment requirements for blkno or
 *      count when freeing blocks.
 */

void
hammer_alist_free(hammer_alist_t live, int32_t blkno, int32_t count)
{
        hammer_alist_config_t bl = live->config;

        KKASSERT(blkno + count <= bl->bl_blocks);
        if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                hammer_alst_leaf_free(live->meta + 1, blkno, count);
        } else {
                hammer_alst_meta_free(live, live->meta + 1,
                                      blkno, count,
                                      bl->bl_radix, bl->bl_skip, 0);
        }
        live->meta->bm_alist_freeblks += count;
}
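
/*
 * For example (hypothetical numbers): after two aligned 8-block
 * allocations returning blocks 0 and 8, hammer_alist_free(live, 4, 8)
 * legally releases the unaligned range [4,12) -- free ranges need not
 * match the original allocation boundaries.
 */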

int
hammer_alist_isfull(hammer_alist_t live)
{
        return(live->meta->bm_alist_freeblks == 0);
}

int
hammer_alist_isempty(hammer_alist_t live)
{
        return(live->meta->bm_alist_freeblks == live->config->bl_radix);
}

#ifdef ALIST_DEBUG

/*
 * alist_print()    - dump radix tree
 */

void
hammer_alist_print(hammer_alist_t live, int tab)
{
        hammer_alist_config_t bl = live->config;

        kprintf("%*.*sALIST (%d free blocks) {\n",
                tab, tab, "", live->meta->bm_alist_freeblks);
        hammer_alst_radix_print(live, live->meta + 1, 0,
                                bl->bl_radix, bl->bl_skip, tab + 4);
        kprintf("%*.*s}\n", tab, tab, "");
}

#endif

/************************************************************************
 *                        ALLOCATION SUPPORT FUNCTIONS                  *
 ************************************************************************
 *
 *      These support functions do all the actual work.  They may seem
 *      rather longish, but that's because I've commented them up.  The
 *      actual code is straightforward.
 *
 */

/*
 * hammer_alist_leaf_alloc_fwd()
 *
 *      Allocate at a leaf in the radix tree (a bitmap).
 *
 *      This is the core of the allocator and is optimized for the 1 block
 *      and the HAMMER_ALIST_BMAP_RADIX block allocation cases.  Other cases
 *      are somewhat slower.  The 1 block allocation case is log2 and extremely
 *      quick.
 */

static int32_t
hammer_alst_leaf_alloc_fwd(hammer_almeta_t scan, int32_t blk,
                           int count, int32_t atblk)
{
        u_int32_t orig = scan->bm_bitmap;
        int32_t saveblk = blk;

        /*
         * Optimize bitmap all-allocated case.  Also, count = 1
         * case assumes at least 1 bit is free in the bitmap, so
         * we have to take care of this case here.
         */
        if (orig == 0) {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

#if 0
        /*
         * Optimized code to allocate one bit out of the bitmap
         *
         * mask iterates e.g. 00001111 00000011 00000001
         *
         * mask starts at 00001111
         */
        if (count == 1) {
                u_int32_t mask;
                int j = HAMMER_ALIST_BMAP_RADIX/2;
                int r = 0;

                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX/2);

                while (j) {
                        if ((orig & mask) == 0) {
                            r += j;
                            orig >>= j;
                        }
                        j >>= 1;
                        mask >>= j;
                }
                scan->bm_bitmap &= ~(1 << r);
                return(blk + r);
        }
#endif

        /*
         * non-optimized code to allocate N bits out of the bitmap.
         * The more bits, the faster the code runs.  It will run
         * the slowest allocating 2 bits, but since there aren't any
         * memory ops in the core loop (or shouldn't be, anyway),
         * you probably won't notice the difference.
         *
         * Similar to the blist case, the alist code also requires
         * allocations to be power-of-2 sized and aligned to the
         * size of the allocation, which simplifies the algorithm.
         */
        {
                int j;
                int n = HAMMER_ALIST_BMAP_RADIX - count;
                u_int32_t mask;

                mask = (u_int32_t)-1 >> n;

                for (j = 0; j <= n; j += count) {
                        if ((orig & mask) == mask && blk >= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask = mask << count;
                        blk += count;
                }
        }

        /*
         * We couldn't allocate count in this subtree, update bighint if
         * atblk didn't interfere with the hinting mechanism.
         */
        if (saveblk >= atblk)
                scan->bm_bighint = count - 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}
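
/*
 * A worked example of the forward scan above, assuming a 32-bit bitmap:
 * with count = 4, atblk = 0, and orig = 0xFFFF00F0, mask starts as 0xF.
 * The j = 0 step fails (the low nibble is zero), the j = 4 step matches
 * (bits 4-7 are all set), so bits 4-7 are cleared and blk + 4 is returned.
 */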

/*
 * This version allocates blocks in the reverse direction.
 */
static int32_t
hammer_alst_leaf_alloc_rev(hammer_almeta_t scan, int32_t blk,
                           int count, int32_t atblk)
{
        u_int32_t orig = scan->bm_bitmap;
        int32_t saveblk;

        /*
         * Optimize bitmap all-allocated case.  Also, count = 1
         * case assumes at least 1 bit is free in the bitmap, so
         * we have to take care of this case here.
         */
        if (orig == 0) {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

#if 0
        /*
         * Optimized code to allocate one bit out of the bitmap
         */
        if (count == 1) {
                u_int32_t mask;
                int j = HAMMER_ALIST_BMAP_RADIX/2;
                int r = HAMMER_ALIST_BMAP_RADIX - 1;

                mask = ~((u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX/2));

                while (j) {
                        if ((orig & mask) == 0) {
                            r -= j;
                            orig <<= j;
                        }
                        j >>= 1;
                        mask <<= j;
                }
                scan->bm_bitmap &= ~(1 << r);
                return(blk + r);
        }
#endif

        /*
         * non-optimized code to allocate N bits out of the bitmap.
         * The more bits, the faster the code runs.  It will run
         * the slowest allocating 2 bits, but since there aren't any
         * memory ops in the core loop (or shouldn't be, anyway),
         * you probably won't notice the difference.
         *
         * Similar to the blist case, the alist code also requires
         * allocations to be power-of-2 sized and aligned to the
         * size of the allocation, which simplifies the algorithm.
         *
         * initial mask if count == 2:  1100....0000
         */
        {
                int j;
                int n = HAMMER_ALIST_BMAP_RADIX - count;
                u_int32_t mask;

                mask = ((u_int32_t)-1 >> n) << n;
                blk += n;
                saveblk = blk;

                for (j = n; j >= 0; j -= count) {
                        if ((orig & mask) == mask && blk <= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask = mask >> count;
                        blk -= count;
                }
        }

        /*
         * We couldn't allocate count in this subtree, update bighint if
         * atblk didn't interfere with it.
         */
        if (saveblk <= atblk)
                scan->bm_bighint = count - 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * hammer_alist_meta_alloc_fwd()
 *
 *      Allocate at a meta node in the radix tree.
 *
 *      Attempt to allocate at a meta node.  If we can't, we update
 *      bighint and return a failure.  Updating bighint optimizes future
 *      calls that hit this node.  We have to check for our collapse cases
 *      and we have a few optimizations strewn in as well.
 */
static int32_t
hammer_alst_meta_alloc_fwd(hammer_alist_t live, hammer_almeta_t scan,
                           int32_t blk, int32_t count,
                           int32_t radix, int skip, int32_t atblk
) {
        hammer_alist_config_t bl;
        u_int32_t mask;
        u_int32_t pmask;
        int32_t saveblk;
        int next_skip;
        int i;

        /*
         * ALL-ALLOCATED special case
         */
        if (scan->bm_bitmap == 0)  {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        /*
         * Radix now represents each bitmap entry for this meta node.  If
         * the number of blocks being allocated can be fully represented,
         * we allocate directly out of this meta node.
         *
         * Meta node bitmaps use 2 bits per block.
         *
         *      00      ALL-ALLOCATED
         *      01      PARTIALLY-FREE/PARTIALLY-ALLOCATED
         *      10      (RESERVED)
         *      11      ALL-FREE
         */
        if (count >= radix) {
                int n = count / radix * 2;      /* number of bits */
                int nd2 = n / 2;
                int j;

                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - n);
                saveblk = blk;
                for (j = 0; j < HAMMER_ALIST_META_RADIX; j += nd2) {
                        if ((scan->bm_bitmap & mask) == mask && blk >= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask <<= n;
                        blk += radix * nd2;
                }
                if (scan->bm_bighint >= count && saveblk >= atblk)
                        scan->bm_bighint = count >> 1;
                return(HAMMER_ALIST_BLOCK_NONE);
        }
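
        /*
         * A worked example of the direct case above (assumed numbers):
         * with a child radix of 32 and count == 64, n == 4 bits and
         * nd2 == 2, so mask == 0xF and each step tests two adjacent
         * ALL-FREE (11 11) children, advancing blk by 64 per step.
         */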

        /*
         * If the count is too big we couldn't allocate anything from a
         * recursion even if the sub-tree were entirely free.
         */
        saveblk = blk;
        if (count > radix)
                goto failed;

        /*
         * If not we have to recurse.
         */
        mask = 0x00000003;
        pmask = 0x00000001;

        if (skip == 1) {
                /*
                 * If skip is 1 we are a meta leaf node, which means there
                 * is another allocation layer we have to dive down into.
                 */
                for (i = 0; i < HAMMER_ALIST_META_RADIX; ++i) {
                        /*
                         * If the next layer is completely free then call
                         * its init function to initialize it, then free
                         * all of its blocks before trying to allocate from
                         * it.
                         *
                         * bl_radix_init returns an error code or 0 on
                         * success.
                         */
                        if ((scan->bm_bitmap & mask) == mask &&
                            blk + radix > atblk) {
                                if (bl->bl_radix_init(live->info, blk, radix) == 0) {
                                        int32_t empty;
                                        scan->bm_bitmap &= ~mask;
                                        scan->bm_bitmap |= pmask;
                                        bl->bl_radix_free(live->info, blk,
                                                          radix, 0, radix,
                                                          &empty);
                                }
                        }

                        /*
                         * If there may be some free blocks try to allocate
                         * out of the layer.  If the layer indicates that
                         * it is completely full then clear both bits to
                         * propagate the condition.
                         */
                        if ((scan->bm_bitmap & mask) == pmask &&
                            blk + radix > atblk) {
                                int32_t r;
                                int32_t full;

                                r = bl->bl_radix_alloc_fwd(live->info, blk,
                                                           radix, count, atblk,
                                                           &full);
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (full) {
                                                scan->bm_bitmap &= ~mask;
                                                bl->bl_radix_destroy(live->info, blk, radix);
                                        }
                                        return(r);
                                }
                        }
                        blk += radix;
                        mask <<= 2;
                        pmask <<= 2;
                }
        } else {
                /*
                 * Otherwise there are sub-records in the current a-list
                 * layer.  We can also peek into the sub-layers to get
                 * more accurate size hints.
                 */
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                for (i = 1; i <= skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                /*
                                 * Terminator
                                 */
                                break;
                        }

                        /*
                         * Initialize bitmap if allocating from the all-free
                         * case.
                         */
                        if ((scan->bm_bitmap & mask) == mask) {
                                scan[i].bm_bitmap = (u_int32_t)-1;
                                scan[i].bm_bighint = radix;
                        }

                        if (count <= scan[i].bm_bighint &&
                            blk + radix > atblk) {
                                /*
                                 * count fits in object, recurse into the
                                 * next layer.  If the next_skip is 1 it
                                 * will be either a normal leaf or a meta
                                 * leaf.
                                 */
                                int32_t r;

                                if (next_skip == 1 && bl->bl_terminal) {
                                        r = hammer_alst_leaf_alloc_fwd(
                                                &scan[i], blk, count, atblk);
                                } else {
                                        r = hammer_alst_meta_alloc_fwd(
                                                live, &scan[i],
                                                blk, count,
                                                radix, next_skip, atblk);
                                }
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (scan[i].bm_bitmap == 0) {
                                                scan->bm_bitmap &= ~mask;
                                        } else {
                                                scan->bm_bitmap &= ~mask;
                                                scan->bm_bitmap |= pmask;
                                        }
                                        return(r);
                                }
                        }
                        blk += radix;
                        mask <<= 2;
                        pmask <<= 2;
                }
        }

failed:
        /*
         * We couldn't allocate count in this subtree, update bighint.
         */
        if (scan->bm_bighint >= count && saveblk >= atblk)
                scan->bm_bighint = count >> 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * This version allocates blocks in the reverse direction.
 */
static int32_t
hammer_alst_meta_alloc_rev(hammer_alist_t live, hammer_almeta_t scan,
                           int32_t blk, int32_t count,
                           int32_t radix, int skip, int32_t atblk
) {
        hammer_alist_config_t bl;
        int i;
        int j;
        u_int32_t mask;
        u_int32_t pmask;
        int32_t saveblk;
        int next_skip;

        /*
         * ALL-ALLOCATED special case
         */
        if (scan->bm_bitmap == 0)  {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        /*
         * Radix now represents each bitmap entry for this meta node.  If
         * the number of blocks being allocated can be fully represented,
         * we allocate directly out of this meta node.
         *
         * Meta node bitmaps use 2 bits per block.
         *
         *      00      ALL-ALLOCATED
         *      01      PARTIALLY-FREE/PARTIALLY-ALLOCATED
         *      10      (RESERVED)
         *      11      ALL-FREE
         */
        if (count >= radix) {
                int n = count / radix * 2;      /* number of bits */
                int nd2 = n / 2;                /* number of radices */
                int j;

                /*
                 * Initial mask if e.g. n == 2:  1100....0000
                 */
                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - n) <<
                        (HAMMER_ALIST_BMAP_RADIX - n);
                blk += (HAMMER_ALIST_META_RADIX - nd2) * radix;
                saveblk = blk;
                for (j = HAMMER_ALIST_META_RADIX - nd2; j >= 0; j -= nd2) {
                        if ((scan->bm_bitmap & mask) == mask && blk <= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask >>= n;
                        blk -= nd2 * radix;
                }
                if (scan->bm_bighint >= count && saveblk <= atblk)
                        scan->bm_bighint = count >> 1;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * If the count is too big we couldn't allocate anything from a
         * recursion even if the sub-tree were entirely free.
         */
        if (count > radix) {
                saveblk = atblk;        /* make it work for the conditional */
                goto failed;            /* at the failed label */
        }

        if (skip == 1) {
                /*
                 * We need to recurse but we are at a meta node leaf, which
                 * means there is another layer under us.
                 */
                mask = 0xC0000000;
                pmask = 0x40000000;
                blk += radix * HAMMER_ALIST_META_RADIX - radix;
                saveblk = blk;

                for (i = 0; i < HAMMER_ALIST_META_RADIX; ++i) {
                        /*
                         * If the next layer is completely free then call
                         * its init function to initialize it and mark it
                         * partially free.
                         */
                        if ((scan->bm_bitmap & mask) == mask && blk <= atblk) {
                                if (bl->bl_radix_init(live->info, blk, radix) == 0) {
                                        int32_t empty;
                                        scan->bm_bitmap &= ~mask;
                                        scan->bm_bitmap |= pmask;
                                        bl->bl_radix_free(live->info, blk,
                                                          radix, 0, radix,
                                                          &empty);
                                }
                        }

                        /*
                         * If there may be some free blocks try to allocate
                         * out of the layer.  If the layer indicates that
                         * it is completely full then clear both bits to
                         * propagate the condition.
                         */
                        if ((scan->bm_bitmap & mask) == pmask && blk <= atblk) {
                                int32_t r;
                                int32_t full;

                                r = bl->bl_radix_alloc_rev(live->info, blk,
                                                           radix, count,
                                                           atblk, &full);
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (full) {
                                                scan->bm_bitmap &= ~mask;
                                                bl->bl_radix_destroy(live->info, blk, radix);
                                        }
                                        return(r);
                                }
                        }
                        mask >>= 2;
                        pmask >>= 2;
                        blk -= radix;
                }
        } else {
                /*
                 * Since we are going in the reverse direction we need an
                 * extra loop to determine if there is a terminator, then
                 * run backwards.
                 *
                 * This is a little weird but we do not want to overflow the
                 * mask/pmask in the loop.
                 */
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                j = 0;
                for (i = 1; i < skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                KKASSERT(j != 0);
                                break;
                        }
                        blk += radix;
                        j += 2;
                }
                blk -= radix;
                j -= 2;
                mask = 0x00000003 << j;
                pmask = 0x00000001 << j;
                i -= next_skip;
                saveblk = blk;

                while (i >= 1) {
                        /*
                         * Initialize the bitmap in the child if allocating
                         * from the all-free case.
                         */
                        if ((scan->bm_bitmap & mask) == mask) {
                                scan[i].bm_bitmap = (u_int32_t)-1;
                                scan[i].bm_bighint = radix;
                        }

                        /*
                         * Handle various count cases.  Bighint may be too
                         * large but is never too small.
                         */
                        if (count <= scan[i].bm_bighint && blk <= atblk) {
                                /*
                                 * count fits in object
                                 */
                                int32_t r;
                                if (next_skip == 1 && bl->bl_terminal) {
                                        r = hammer_alst_leaf_alloc_rev(
                                                &scan[i], blk, count, atblk);
                                } else {
                                        r = hammer_alst_meta_alloc_rev(
                                                live, &scan[i],
                                                blk, count,
                                                radix, next_skip, atblk);
                                }
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (scan[i].bm_bitmap == 0) {
                                                scan->bm_bitmap &= ~mask;
                                        } else {
                                                scan->bm_bitmap &= ~mask;
                                                scan->bm_bitmap |= pmask;
                                        }
                                        return(r);
                                }
                        }
                        blk -= radix;
                        mask >>= 2;
                        pmask >>= 2;
                        i -= next_skip;
                }
        }

failed:
        /*
         * We couldn't allocate count in this subtree, update bighint.
         * Since we are restricted to powers of 2, the next highest count
         * we might be able to allocate is (count >> 1).
         */
        if (scan->bm_bighint >= count && saveblk <= atblk)
                scan->bm_bighint = count >> 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * BLST_LEAF_FREE()
 *
 *      Free allocated blocks from leaf bitmap.  The allocation code is
 *      restricted to powers of 2, the freeing code is not.
 */
static void
hammer_alst_leaf_free(hammer_almeta_t scan, int32_t blk, int count) {
        /*
         * free some data in this bitmap
         *
         * e.g.
         *      0000111111111110000
         *          \_________/\__/
         *              v        n
         */
        int n = blk & (HAMMER_ALIST_BMAP_RADIX - 1);
        u_int32_t mask;

        mask = ((u_int32_t)-1 << n) &
            ((u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - count - n));

        if (scan->bm_bitmap & mask)
                panic("hammer_alst_radix_free: freeing free block");
        scan->bm_bitmap |= mask;

        /*
         * We could probably do a better job here.  We are required to make
         * bighint at least as large as the biggest contiguous block of
         * data.  If we just shoehorn it, a little extra overhead will
         * be incurred on the next allocation (but only that one typically).
         */
        scan->bm_bighint = HAMMER_ALIST_BMAP_RADIX;
}

/*
 * BLST_META_FREE()
 *
 *      Free allocated blocks from radix tree meta info.
 *
 *      This support routine frees a range of blocks from the bitmap.
 *      The range must be entirely enclosed by this radix node.  If a
 *      meta node, we break the range down recursively to free blocks
 *      in subnodes (which means that this code can free an arbitrary
 *      range whereas the allocation code cannot allocate an arbitrary
 *      range).
 */

static void
hammer_alst_meta_free(hammer_alist_t live, hammer_almeta_t scan,
                      int32_t freeBlk, int32_t count,
                      int32_t radix, int skip, int32_t blk
) {
        hammer_alist_config_t bl;
        int next_skip;
        u_int32_t mask;
        u_int32_t pmask;
        int i;

        /*
         * Break the free down into its components.  Because it is so easy
         * to implement, frees are not limited to power-of-2 sizes.
         *
         * Each block in a meta-node bitmap takes two bits.
         */
        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        i = (freeBlk - blk) / radix;
        blk += i * radix;
        mask = 0x00000003 << (i * 2);
        pmask = 0x00000001 << (i * 2);

        if (skip == 1) {
                /*
                 * Our meta node is a leaf node, which means it must recurse
                 * into another allocator.
                 */
                while (i < HAMMER_ALIST_META_RADIX && blk < freeBlk + count) {
                        int32_t v;

                        v = blk + radix - freeBlk;
                        if (v > count)
                                v = count;

                        if (scan->bm_bighint == (int32_t)-1)
                                panic("hammer_alst_meta_free: freeing unexpected range");

                        if (freeBlk == blk && count >= radix) {
                                /*
                                 * When the sub-tree becomes entirely free
                                 * we have to destroy it if it was previously
                                 * partially allocated.  If it was previously
                                 * fully allocated it has already been
                                 * destroyed (or never inited in the first
                                 * place).
                                 */
                                int32_t empty;

                                if ((scan->bm_bitmap & mask) != 0) {
                                        bl->bl_radix_free(live->info, blk, radix,
                                                          freeBlk - blk, v, &empty);
                                        bl->bl_radix_destroy(live->info, blk, radix);
                                }
                                scan->bm_bitmap |= mask;
                                scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                /* XXX bighint not being set properly */
                        } else {
                                /*
                                 * Recursion case.  If freeing from an
                                 * all-allocated state init the sub-tree
                                 * first.
                                 */
                                int32_t empty;

                                if ((scan->bm_bitmap & mask) == 0)
                                        bl->bl_radix_init(live->info, blk, radix);
                                bl->bl_radix_free(live->info, blk, radix,
                                                  freeBlk - blk, v, &empty);
                                if (empty) {
                                        scan->bm_bitmap |= mask;
                                        scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                        bl->bl_radix_destroy(live->info, blk, radix);
                                        /* XXX bighint not being set properly */
                                } else {
                                        scan->bm_bitmap |= pmask;
                                        if (scan->bm_bighint < radix / 2)
                                                scan->bm_bighint = radix / 2;
                                        /* XXX bighint not being set properly */
                                }
                        }
                        ++i;
                        mask <<= 2;
                        pmask <<= 2;
                        count -= v;
                        freeBlk += v;
                        blk += radix;
                }
        } else {
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                i = 1 + i * next_skip;

                while (i <= skip && blk < freeBlk + count) {
                        int32_t v;

                        v = blk + radix - freeBlk;
                        if (v > count)
                                v = count;

                        if (scan->bm_bighint == (int32_t)-1)
                                panic("hammer_alst_meta_free: freeing unexpected range");

                        if (freeBlk == blk && count >= radix) {
                                /*
                                 * All-free case, no need to update sub-tree
                                 */
                                scan->bm_bitmap |= mask;
                                scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                /* XXX bighint not being set properly */
                        } else {
                                /*
                                 * Recursion case
                                 */
                                if (next_skip == 1 && bl->bl_terminal) {
                                        hammer_alst_leaf_free(&scan[i], freeBlk, v);
                                } else {
                                        hammer_alst_meta_free(live, &scan[i],
                                                              freeBlk, v,
                                                              radix, next_skip,
                                                              blk);
                                }
                                if (scan[i].bm_bitmap == (u_int32_t)-1)
                                        scan->bm_bitmap |= mask;
                                else
                                        scan->bm_bitmap |= pmask;
                                if (scan->bm_bighint < scan[i].bm_bighint)
                                        scan->bm_bighint = scan[i].bm_bighint;
                        }
                        mask <<= 2;
                        pmask <<= 2;
                        count -= v;
                        freeBlk += v;
                        blk += radix;
                        i += next_skip;
                }
        }
}

/*
 * hammer_alst_radix_init()
 *
 *      Initialize our meta structures and bitmaps and calculate the exact
 *      number of meta-nodes required to manage 'count' blocks.
 *
 *      The required space may be truncated due to early termination records.
 */
static int32_t
hammer_alst_radix_init(hammer_almeta_t scan, int32_t radix,
                       int skip, int32_t count)
{
        int i;
        int next_skip;
        int32_t memindex = 1;
        u_int32_t mask;
        u_int32_t pmask;

        /*
         * Basic initialization of the almeta for meta or leaf nodes
         */
        if (scan) {
                scan->bm_bighint = 0;
                scan->bm_bitmap = 0;
        }

        /*
         * We are at a leaf; we only eat one meta element.
         */
        if (skip == 1)
                return(memindex);

        /*
         * Meta node.  If allocating the entire object we can special
         * case it.  However, we need to figure out how much memory
         * is required to manage 'count' blocks, so we continue on anyway.
         */
        radix /= HAMMER_ALIST_META_RADIX;
        next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
        mask = 0x00000003;
        pmask = 0x00000001;

        for (i = 1; i <= skip; i += next_skip) {
                /*
                 * We eat up to this record
                 */
                memindex = i;

                if (count >= radix) {
                        /*
                         * Allocate the entire object
                         */
                        memindex += hammer_alst_radix_init(
                            ((scan) ? &scan[i] : NULL),
                            radix,
                            next_skip,
                            radix
                        );
                        count -= radix;
                        /* already marked as wholly allocated */
                } else if (count > 0) {
                        /*
                         * Allocate a partial object
                         */
                        memindex += hammer_alst_radix_init(
                            ((scan) ? &scan[i] : NULL),
                            radix,
                            next_skip,
                            count
                        );
                        count = 0;

                        /*
                         * Mark as partially allocated
                         */
                        if (scan)
                                scan->bm_bitmap |= pmask;
                } else {
                        /*
                         * Add terminator and break out.  The terminator
                         * eats the meta node at scan[i].
                         */
                        ++memindex;
                        if (scan)
                                scan[i].bm_bighint = (int32_t)-1;
                        /* already marked as wholly allocated */
                        break;
                }
                mask <<= 2;
                pmask <<= 2;
        }
        return(memindex);
}

#ifdef ALIST_DEBUG

static void
hammer_alst_radix_print(hammer_alist_t live, hammer_almeta_t scan,
                        int32_t blk, int32_t radix, int skip, int tab)
{
        int i;
        int next_skip;
        u_int32_t mask;

        if (skip == 1 && live->config->bl_terminal) {
                kprintf(
                    "%*.*s(%04x,%d): bitmap %08x big=%d\n",
                    tab, tab, "",
                    blk, radix,
                    scan->bm_bitmap,
                    scan->bm_bighint
                );
                return;
        }

        if (scan->bm_bitmap == 0) {
                kprintf(
                    "%*.*s(%04x,%d) ALL ALLOCATED\n",
                    tab, tab, "",
                    blk,
                    radix
                );
                return;
        }
        if (scan->bm_bitmap == (u_int32_t)-1) {
                kprintf(
                    "%*.*s(%04x,%d) ALL FREE\n",
                    tab, tab, "",
                    blk,
                    radix
                );
                return;
        }

        kprintf(
            "%*.*s(%04x,%d): %s (%d) bitmap=%08x big=%d {\n",
            tab, tab, "",
            blk, radix,
            (skip == 1 ? "LAYER" : "subtree"),
            radix,
            scan->bm_bitmap,
            scan->bm_bighint
        );

        radix /= HAMMER_ALIST_META_RADIX;
        tab += 4;
        mask = 0x00000003;

        if (skip == 1) {
                for (i = 0; i < HAMMER_ALIST_META_RADIX; ++i) {
                        if ((scan->bm_bitmap & mask) == mask) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL FREE\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else if ((scan->bm_bitmap & mask) == 0) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL ALLOCATED\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else {
                                live->config->bl_radix_print(
                                                live->info, blk, radix, tab);
                        }
                        blk += radix;
                        mask <<= 2;
                }
        } else {
                next_skip = ((u_int)skip / HAMMER_ALIST_META_RADIX);

                for (i = 1; i <= skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                kprintf(
                                    "%*.*s(%04x,%d): Terminator\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                                break;
                        }
                        if ((scan->bm_bitmap & mask) == mask) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL FREE\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else if ((scan->bm_bitmap & mask) == 0) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL ALLOCATED\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else {
                                hammer_alst_radix_print(
                                    live,
                                    &scan[i],
                                    blk,
                                    radix,
                                    next_skip - 1,
                                    tab
                                );
                        }
                        blk += radix;
                        mask <<= 2;
                }
        }
        tab -= 4;

        kprintf(
            "%*.*s}\n",
            tab, tab, ""
        );
}
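
/*
 * Illustrative output only (hypothetical values, pieced together from
 * the format strings above):
 *
 *      (0000,1024): subtree (1024) bitmap=fffffffc big=64 {
 *          (0000,64): ALL ALLOCATED
 *          (0040,64): ALL FREE
 *          ...
 *      }
 */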

#endif

#ifdef ALIST_DEBUG

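/*
 * Userland test harness.  A rough sketch of the setup, inferred from
 * main() below: the top-level a-list is configured with the callbacks
 * underneath, which materialize a second storage layer on demand.
 * Each lower layer is itself a single-layer a-list covering
 * layer_radix blocks, and layers[] maps a top-level block number
 * (blk / layer_radix) to its lower a-list.
 */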
static struct hammer_alist_live **layers;       /* initialized by main */
static int32_t layer_radix = -1;

static
int
debug_radix_init(void *info, int32_t blk, int32_t radix)
{
        hammer_alist_t layer;
        int layer_no = blk / layer_radix;

        printf("lower layer: init (%04x,%d)\n", blk, radix);
        KKASSERT(layers[layer_no] == NULL);
        layer = layers[layer_no] = hammer_alist_create(radix, 1, NULL);
        return(0);
}

static
int
debug_radix_destroy(void *info, int32_t blk, int32_t radix)
{
        hammer_alist_t layer;
        int layer_no = blk / layer_radix;

        printf("lower layer: destroy (%04x,%d)\n", blk, radix);
        layer = layers[layer_no];
        KKASSERT(layer != NULL);
        hammer_alist_destroy(layer, NULL);
        layers[layer_no] = NULL;
        return(0);
}

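/*
 * Allocation callbacks.  The parent a-list hands these a global block
 * range starting at 'blk'; atblk - blk converts the caller's hint to a
 * layer-local block number, and a successful result is translated back
 * by adding 'blk'.  *fullp reports whether the layer has run out of
 * free blocks so the parent can update the covering meta bits (see the
 * 00/11/01 encoding noted earlier).
 */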
static
int32_t
debug_radix_alloc_fwd(void *info, int32_t blk, int32_t radix,
                      int32_t count, int32_t atblk, int32_t *fullp)
{
        hammer_alist_t layer = layers[blk / layer_radix];
        int32_t r;

        r = hammer_alist_alloc_fwd(layer, count, atblk - blk);
        *fullp = (layer->meta->bm_alist_freeblks == 0);
        if (r != HAMMER_ALIST_BLOCK_NONE)
                r += blk;
        return(r);
}

static
int32_t
debug_radix_alloc_rev(void *info, int32_t blk, int32_t radix,
                      int32_t count, int32_t atblk, int32_t *fullp)
{
        hammer_alist_t layer = layers[blk / layer_radix];
        int32_t r;

        r = hammer_alist_alloc_rev(layer, count, atblk - blk);
        *fullp = (layer->meta->bm_alist_freeblks == 0);
        if (r != HAMMER_ALIST_BLOCK_NONE)
                r += blk;
        return(r);
}

static
void
debug_radix_free(void *info, int32_t blk, int32_t radix,
                 int32_t base_blk, int32_t count, int32_t *emptyp)
{
        int layer_no = blk / layer_radix;
        hammer_alist_t layer = layers[layer_no];

        if (layer == NULL) {
                layer = hammer_alist_create(layer_radix, 1, NULL);
                layers[layer_no] = layer;
        }
        hammer_alist_free(layer, base_blk, count);
        *emptyp = (layer->meta->bm_alist_freeblks == layer_radix);
}
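
/*
 * Note that hammer_alist_create() evidently returns an all-allocated
 * list (main() below frees the whole range immediately after creating
 * the top level), so freeing into a layer that was never initialized
 * simply materializes it on the spot.
 */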

static
void
debug_radix_print(void *info, int32_t blk, int32_t radix, int tab)
{
        hammer_alist_t layer = layers[blk / layer_radix];

        hammer_alist_print(layer, tab);
}

int
main(int ac, char **av)
{
        int32_t size = -1;
        int i;
        hammer_alist_t live;

        for (i = 1; i < ac; ++i) {
                const char *ptr = av[i];
                if (*ptr != '-') {
                        if (size == -1)
                                size = strtol(ptr, NULL, 0);
                        else if (layer_radix == -1)
                                layer_radix = strtol(ptr, NULL, 0);
                        continue;
                }
                fprintf(stderr, "Bad option: %s\n", ptr);
                exit(1);
        }
        if (size == -1)
                size = 1024;
        if (layer_radix == -1)
                layer_radix = 1;        /* no second storage layer */
        if ((size ^ (size - 1)) != (size << 1) - 1) {
                fprintf(stderr, "size must be a power of 2\n");
                exit(1);
        }
        if ((layer_radix ^ (layer_radix - 1)) != (layer_radix << 1) - 1) {
                fprintf(stderr, "the second layer radix must be a power of 2\n");
                exit(1);
        }
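
        /*
         * The checks above use a standard bit trick: x ^ (x - 1)
         * produces a mask of ones covering the lowest set bit of x and
         * every bit below it, and that mask equals (x << 1) - 1 only
         * when x has a single bit set.  E.g. 1024 ^ 1023 == 2047 ==
         * (1024 << 1) - 1, while 12 ^ 11 == 0xc ^ 0xb == 0x7 == 7 but
         * (12 << 1) - 1 == 23, so the check fails.
         */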

        live = hammer_alist_create(size, layer_radix, NULL);
        layers = calloc(size, sizeof(hammer_alist_t));

        printf("A-LIST TEST %d blocks, first-layer radix %d, "
               "second-layer radix %d\n",
                size, live->config->bl_radix / layer_radix, layer_radix);

        live->config->bl_radix_init = debug_radix_init;
        live->config->bl_radix_destroy = debug_radix_destroy;
        live->config->bl_radix_alloc_fwd = debug_radix_alloc_fwd;
        live->config->bl_radix_alloc_rev = debug_radix_alloc_rev;
        live->config->bl_radix_free = debug_radix_free;
        live->config->bl_radix_print = debug_radix_print;

        hammer_alist_free(live, 0, size);

        for (;;) {
                char buf[1024];
                int32_t da = 0;
                int32_t count = 0;
                int32_t atblk = 0;      /* 'a' with no position starts at 0 */
                int32_t blk;

                kprintf("%d/%d> ",
                        live->meta->bm_alist_freeblks, size);
                fflush(stdout);
                if (fgets(buf, sizeof(buf), stdin) == NULL)
                        break;
                switch(buf[0]) {
                case 'p':
                        hammer_alist_print(live, 0);
                        break;
                case 'a':
                        if (sscanf(buf + 1, "%d %d", &count, &atblk) >= 1) {
                                blk = hammer_alist_alloc_fwd(live, count, atblk);
                                kprintf("    R=%04x\n", blk);
                        } else {
                                kprintf("?\n");
                        }
                        break;
                case 'r':
                        atblk = HAMMER_ALIST_BLOCK_MAX;
                        if (sscanf(buf + 1, "%d %d", &count, &atblk) >= 1) {
                                blk = hammer_alist_alloc_rev(live, count, atblk);
                                kprintf("    R=%04x\n", blk);
                        } else {
                                kprintf("?\n");
                        }
                        break;
                case 'f':
                        if (sscanf(buf + 1, "%x %d", &da, &count) == 2) {
                                hammer_alist_free(live, da, count);
                        } else {
                                kprintf("?\n");
                        }
                        break;
                case '?':
                case 'h':
                        puts(
                            "p          -print the tree\n"
                            "a %d [%d]  -allocate forward (count [atblk])\n"
                            "r %d [%d]  -allocate reverse (count [atblk])\n"
                            "f %x %d    -free (blkno count)\n"
                            "h/?        -help"
                        );
                        break;
                default:
                        kprintf("?\n");
                        break;
                }
        }
        return(0);
}
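
/*
 * Example session (hypothetical values; the prompt shows
 * freeblks/size, and 'f' takes a hex block number):
 *
 *      1024/1024> a 16
 *          R=0000
 *      1008/1024> a 16 512
 *          R=0200
 *      992/1024> f 200 16
 *      1008/1024> p
 */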

void
panic(const char *ctl, ...)
{
        __va_list va;

        __va_start(va, ctl);
        vfprintf(stderr, ctl, va);
        fprintf(stderr, "\n");
        __va_end(va);
        exit(1);
}

#endif