HAMMER 18/many: Stabilization pass
dragonfly.git: sys/vfs/hammer/hammer_alist.c
/*
 * HAMMER_ALIST.C -
 *
 * Bitmap allocator/deallocator, using a radix tree with hinting.
 * Unlimited-size allocations, power-of-2 only, power-of-2 aligned results
 * only.  This module has been separated from the generic kernel module and
 * written specifically for embedding in HAMMER storage structures.
 *
 * Copyright (c) 2007 The DragonFly Project.  All rights reserved.
 *
 * This code is derived from software contributed to The DragonFly Project
 * by Matthew Dillon <dillon@backplane.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name of The DragonFly Project nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific, prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $DragonFly: src/sys/vfs/hammer/Attic/hammer_alist.c,v 1.8 2008/01/11 01:41:33 dillon Exp $
 */
/*
 * This module implements a generic allocator through the use of a hinted
 * radix tree.  All allocations must be in powers of 2 and will return
 * similarly aligned results.  The radix tree typically recurses within
 * a memory buffer and then continues its recursion by chaining to other
 * memory buffers, creating a seamless whole capable of managing any amount
 * of storage.
 *
 * The radix tree is laid out recursively using a linear array.  Each meta
 * node is immediately followed (laid out sequentially in memory) by
 * HAMMER_ALIST_META_RADIX lower level nodes.  This is a recursive structure
 * but one that can be easily scanned through a very simple 'skip'
 * calculation.
 *
 * The radix tree supports an early-termination optimization which
 * effectively allows us to efficiently mix large and small allocations
 * with a single abstraction.  The address space can be partitioned
 * arbitrarily without adding much in the way of additional meta-storage
 * for the allocator.
 *
 * The radix tree supports allocator layering.  By supplying a base_radix > 1
 * the allocator will issue callbacks to recurse into lower layers once
 * higher layers have been exhausted.
 *
 * ALLOCATIONS IN MULTIPLES OF base_radix WILL BE ENTIRELY RETAINED IN THE
 * HIGHER LEVEL ALLOCATOR AND NEVER RECURSE.  This means the init function
 * will never be called and the A-list will consider the underlying zone
 * as being uninitialized.  If you then do a partial free, the A-list will
 * call the init function before freeing.  Most users of this API, including
 * HAMMER, only allocate and free whole zones, or only allocate and free
 * partial zones, and never mix their metaphors.
 *
 * This code can be compiled stand-alone for debugging.
 */
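
/*
 * Example of the 'skip' layout (assuming HAMMER_ALIST_META_RADIX == 16):
 * with bl_skip == 17 the root meta node sits at scan[0] followed by its
 * 16 children at scan[1]..scan[16], each child subtree occupying
 * next_skip = (17 - 1) / 16 = 1 array element.  With bl_skip == 273 each
 * child subtree occupies next_skip = (273 - 1) / 16 = 17 elements, and
 * so on recursively.
 */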

#ifdef _KERNEL

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/lock.h>
#include <sys/kernel.h>
#include <sys/malloc.h>
#include <vm/vm.h>
#include <vm/vm_object.h>
#include <vm/vm_kern.h>
#include <vm/vm_extern.h>
#include <vm/vm_page.h>

#include "hammer_alist.h"
#include "hammer_disk.h"

#else

#ifndef ALIST_NO_DEBUG
#define ALIST_DEBUG
#endif

#include <sys/types.h>
#include <stdio.h>
#include <assert.h>
#include <string.h>
#include <stdlib.h>
#include <stdarg.h>

#define kmalloc(a,b,c)  malloc(a)
#define kfree(a,b)      free(a)
#define kprintf         printf
#define KKASSERT(exp)   assert(exp)
struct malloc_type;

#include "hammer_alist.h"

void panic(const char *ctl, ...);

#endif

/*
 * static support functions
 */
static int32_t hammer_alst_leaf_alloc_fwd(hammer_almeta_t scan,
                                        int32_t blk, int count, int32_t atblk);
static int32_t hammer_alst_meta_alloc_fwd(hammer_alist_t live,
                                        hammer_almeta_t scan,
                                        int32_t blk, int32_t count,
                                        int32_t radix, int skip, int32_t atblk);
static int32_t hammer_alst_leaf_alloc_rev(hammer_almeta_t scan,
                                        int32_t blk, int count, int32_t atblk);
static int32_t hammer_alst_meta_alloc_rev(hammer_alist_t live,
                                        hammer_almeta_t scan,
                                        int32_t blk, int32_t count,
                                        int32_t radix, int skip, int32_t atblk);
static int32_t hammer_alst_find(hammer_alist_t live, hammer_almeta_t scan,
                                        int32_t blk, int32_t radix,
                                        int32_t skip, int32_t atblk);
static void hammer_alst_leaf_free(hammer_almeta_t scan, int32_t relblk,
                                        int count);
static void hammer_alst_meta_free(hammer_alist_t live, hammer_almeta_t scan,
                                        int32_t freeBlk, int32_t count,
                                        int32_t radix, int skip, int32_t blk);
static int32_t  hammer_alst_radix_init(hammer_almeta_t scan,
                                        int32_t radix, int skip, int32_t count);
static void     hammer_alst_radix_recover(hammer_alist_recover_t info,
                                        hammer_almeta_t scan, int32_t blk,
                                        int32_t radix, int skip, int32_t count);
#ifdef ALIST_DEBUG
static void     hammer_alst_radix_print(hammer_alist_t live,
                                        hammer_almeta_t scan, int32_t blk,
                                        int32_t radix, int skip, int tab);
#endif

/*
 * Initialize an a-list config structure for use.  The config structure
 * describes the basic structure of an a-list's topology and may be
 * shared by any number of a-lists which have the same topology.
 *
 * blocks is the total number of blocks, that is the number of blocks
 * handled at this layer multiplied by the base radix.
 *
 * When base_radix != 1 the A-list has only meta elements and does not have
 * any leaves, in order to be able to track partial allocations.
 */
void
hammer_alist_template(hammer_alist_config_t bl, int32_t blocks,
                      int32_t base_radix, int32_t maxmeta)
{
        int radix;
        int skip;

        /*
         * Calculate radix and skip field used for scanning.  The leaf nodes
         * in our tree are either BMAP or META nodes depending on whether
         * we chain to a lower level allocation layer or not.
         */
        if (base_radix == 1)
                radix = HAMMER_ALIST_BMAP_RADIX;
        else
                radix = HAMMER_ALIST_META_RADIX;
        skip = 1;

        while (radix < blocks / base_radix) {
                radix *= HAMMER_ALIST_META_RADIX;
                skip = skip * HAMMER_ALIST_META_RADIX + 1;
        }
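
        /*
         * For example (assuming HAMMER_ALIST_BMAP_RADIX == 32 and
         * HAMMER_ALIST_META_RADIX == 16): blocks == 1024 with
         * base_radix == 1 starts at radix 32, skip 1, then iterates
         * twice to radix 8192, skip 273 -- a root meta node, 16 meta
         * children, and 256 leaves.
         */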

        /*
         * Increase the radix based on the number of blocks a lower level
         * allocator is able to handle at the 'base' of our allocator.
         * If base_radix != 1 the caller will have to initialize the callback
         * fields to implement the lower level allocator.
         */
        KKASSERT((int64_t)radix * (int64_t)base_radix < 0x80000000LL);
        radix *= base_radix;

        bzero(bl, sizeof(*bl));

        bl->bl_blocks = blocks;
        bl->bl_base_radix = base_radix;
        bl->bl_radix = radix;
        bl->bl_skip = skip;
        bl->bl_rootblks = hammer_alst_radix_init(NULL, bl->bl_radix,
                                                 bl->bl_skip, blocks);
        ++bl->bl_rootblks;      /* one more for freeblks header */
        if (base_radix == 1)
                bl->bl_terminal = 1;
        KKASSERT(maxmeta == 0 || bl->bl_rootblks <= maxmeta);

#if defined(ALIST_DEBUG)
        kprintf(
                "PRIMARY ALIST LAYER manages %d blocks"
                ", requiring %dK (%d bytes) of ram\n",
                bl->bl_blocks / bl->bl_base_radix,
                (bl->bl_rootblks * sizeof(struct hammer_almeta) + 1023) / 1024,
                (bl->bl_rootblks * sizeof(struct hammer_almeta))
        );
        kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
}

/*
 * Initialize a new A-list
 */
void
hammer_alist_init(hammer_alist_t live, int32_t start, int32_t count,
                  enum hammer_alloc_state state)
{
        hammer_alist_config_t bl = live->config;

        /*
         * Note: base_freeblks is a count, not a block number limit.
         */
        live->meta->bm_alist_freeblks = 0;
        live->meta->bm_alist_base_freeblks = count;
        hammer_alst_radix_init(live->meta + 1, bl->bl_radix,
                               bl->bl_skip, bl->bl_blocks);
        if (count && state == HAMMER_ASTATE_FREE)
                hammer_alist_free(live, start, count);
}

#if !defined(_KERNEL) && defined(ALIST_DEBUG)

/*
 * hammer_alist_create()        (userland only)
 *
 *      Create an alist capable of handling up to the specified number of
 *      blocks.  blocks must be greater than 0.
 *
 *      The smallest alist consists of a single leaf node capable of
 *      managing HAMMER_ALIST_BMAP_RADIX blocks, or (if base_radix != 1)
 *      a single meta node capable of managing HAMMER_ALIST_META_RADIX
 *      blocks which recurses into other storage layers for a total
 *      handling capability of (HAMMER_ALIST_META_RADIX * base_radix) blocks.
 *
 *      Larger a-lists increase their capability exponentially by
 *      HAMMER_ALIST_META_RADIX.
 *
 *      The block count is the total number of blocks inclusive of any
 *      layering.  blocks can be less than base_radix and will result in
 *      a radix tree with a single leaf node which then chains down.
 */

hammer_alist_t
hammer_alist_create(int32_t blocks, int32_t base_radix,
                    struct malloc_type *mtype, enum hammer_alloc_state state)
{
        hammer_alist_t live;
        hammer_alist_config_t bl;
        size_t metasize;

        live = kmalloc(sizeof(*live), mtype, M_WAITOK);
        live->config = bl = kmalloc(sizeof(*bl), mtype, M_WAITOK);
        hammer_alist_template(bl, blocks, base_radix, 0);

        metasize = sizeof(*live->meta) * bl->bl_rootblks;
        live->meta = kmalloc(metasize, mtype, M_WAITOK);
        bzero(live->meta, metasize);

#if defined(ALIST_DEBUG)
        kprintf(
                "ALIST representing %d blocks (%d MB of swap)"
                ", requiring %dK (%d bytes) of ram\n",
                bl->bl_blocks,
                bl->bl_blocks * 4 / 1024,
                (bl->bl_rootblks * sizeof(*live->meta) + 1023) / 1024,
                (bl->bl_rootblks * sizeof(*live->meta))
        );
        if (base_radix != 1) {
                kprintf("ALIST recurses when it reaches a base_radix of %d\n",
                        base_radix);
        }
        kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
        hammer_alist_init(live, 0, blocks, state);
        return(live);
}

void
hammer_alist_destroy(hammer_alist_t live, struct malloc_type *mtype)
{
        kfree(live->config, mtype);
        kfree(live->meta, mtype);
        live->config = NULL;
        live->meta = NULL;
        kfree(live, mtype);
}
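
/*
 * Example usage in a userland debug build (a minimal sketch; the mtype
 * argument is discarded by the userland kmalloc/kfree shims above):
 *
 *      hammer_alist_t live;
 *      int32_t blk;
 *
 *      live = hammer_alist_create(1024, 1, NULL, HAMMER_ASTATE_FREE);
 *      blk = hammer_alist_alloc(live, 16);     (count: power of 2)
 *      if (blk != HAMMER_ALIST_BLOCK_NONE)
 *              hammer_alist_free(live, blk, 16);
 *      hammer_alist_destroy(live, NULL);
 */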

#endif

/*
 * hammer_alist_alloc()
 *
 *      Reserve space in the block bitmap.  Return the base of a contiguous
 *      region or HAMMER_ALIST_BLOCK_NONE if space could not be allocated.
 */

int32_t
hammer_alist_alloc(hammer_alist_t live, int32_t count)
{
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;
        hammer_alist_config_t bl = live->config;

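        /*
         * Assert that count is a power of 2.  The identity below holds
         * only for powers of 2, e.g. (8 | 7) == 15 == (8 << 1) - 1,
         * while (6 | 5) == 7 != (6 << 1) - 1.
         */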
        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count <= bl->bl_radix) {
                /*
                 * When skip is 1 we are at a leaf.  If we are the terminal
                 * allocator we use our native leaf functions and radix will
                 * be HAMMER_ALIST_BMAP_RADIX.  Otherwise we are a meta node
                 * which will chain to another allocator.
                 */
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_fwd(
                                    live->meta + 1, 0, count, 0);
                } else {
                        blk = hammer_alst_meta_alloc_fwd(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip, 0);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

int32_t
hammer_alist_alloc_fwd(hammer_alist_t live, int32_t count, int32_t atblk)
{
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;
        hammer_alist_config_t bl = live->config;

        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count <= bl->bl_radix) {
                /*
                 * When skip is 1 we are at a leaf.  If we are the terminal
                 * allocator we use our native leaf functions and radix will
                 * be HAMMER_ALIST_BMAP_RADIX.  Otherwise we are a meta node
                 * which will chain to another allocator.
                 */
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_fwd(
                                    live->meta + 1, 0, count, atblk);
                } else {
                        blk = hammer_alst_meta_alloc_fwd(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip, atblk);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

int32_t
hammer_alist_alloc_rev(hammer_alist_t live, int32_t count, int32_t atblk)
{
        hammer_alist_config_t bl = live->config;
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;

        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count < bl->bl_radix) {
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_rev(
                                    live->meta + 1, 0, count, atblk);
                } else {
                        blk = hammer_alst_meta_alloc_rev(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip, atblk);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

/*
 * hammer_alist_find()
 *
 *      Locate the first block >= atblk marked as allocated in the A-list
 *      and return it.  Return HAMMER_ALIST_BLOCK_NONE if no block could
 *      be found.
 */
int32_t
hammer_alist_find(hammer_alist_t live, int32_t atblk)
{
        hammer_alist_config_t bl = live->config;
        KKASSERT(live->config != NULL);
        KKASSERT(atblk >= 0);
        atblk = hammer_alst_find(live, live->meta + 1, 0, bl->bl_radix,
                                 bl->bl_skip, atblk);
        return(atblk);
}

/*
 * hammer_alist_free()
 *
 *      Free up space in the block bitmap.  Panic if an inconsistency is
 *      found.
 *
 *      Unlike allocations, there are no alignment requirements for blkno or
 *      count when freeing blocks.
 */

void
hammer_alist_free(hammer_alist_t live, int32_t blkno, int32_t count)
{
        hammer_alist_config_t bl = live->config;

        KKASSERT(blkno + count <= bl->bl_blocks);
        if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                hammer_alst_leaf_free(live->meta + 1, blkno, count);
        } else {
                hammer_alst_meta_free(live, live->meta + 1,
                                      blkno, count,
                                      bl->bl_radix, bl->bl_skip, 0);
        }
        live->meta->bm_alist_freeblks += count;
}

/*
 * Recover an A-list.  This will dive down to the leaves and regenerate
 * the hints and the freeblks count.  This function will also recurse
 * through any stacked A-lists.  > 0 is returned on success, a negative
 * error code on failure.
 *
 * Since A-lists have no pointers the only thing that can prevent recovery
 * is an I/O error in e.g. a stacked A-list.  This doesn't mean the recovered
 * map will be meaningful, however.
 *
 * blk is usually passed as 0 at the top level and is adjusted as the recovery
 * code scans the A-list.  It is only used when recursing down a stacked
 * A-list.
 */
int
hammer_alist_recover(hammer_alist_t live, int32_t blk, int32_t start,
                     int32_t count)
{
        hammer_alist_config_t bl = live->config;
        struct hammer_alist_recover info;
        int32_t r;

        info.live = live;
        info.error = 0;

        live->meta->bm_alist_freeblks = 0;
        live->meta->bm_alist_base_freeblks = count;
        hammer_alst_radix_recover(&info, live->meta + 1, blk, bl->bl_radix,
                                  bl->bl_skip, bl->bl_blocks);
        if (info.error)
                return(info.error);

        /*
         * Any garbage below start, or at or beyond start + count, is
         * freed back to the A-list.
         */
        while ((r = hammer_alist_find(live, 0)) != HAMMER_ALIST_BLOCK_NONE &&
               r < start) {
                hammer_alist_free(live, r, 1);
        }
        while ((r = hammer_alist_find(live, start + count)) !=
                HAMMER_ALIST_BLOCK_NONE) {
                hammer_alist_free(live, r, 1);
        }
        return(live->meta->bm_alist_freeblks);
}
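
/*
 * A full recovery pass over a live A-list would typically look like the
 * following (a sketch; blk is 0 at the top level per the comment above,
 * and the start/count window brackets the legal block range):
 *
 *      hammer_alist_recover(live, 0, 0, live->config->bl_blocks);
 */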

int
hammer_alist_isfull(hammer_alist_t live)
{
        return(live->meta->bm_alist_freeblks == 0);
}

int
hammer_alist_isempty(hammer_alist_t live)
{
        return((int)live->meta->bm_alist_freeblks ==
               live->meta->bm_alist_base_freeblks);
}

#ifdef ALIST_DEBUG

/*
 * alist_print()    - dump radix tree
 */

void
hammer_alist_print(hammer_alist_t live, int tab)
{
        hammer_alist_config_t bl = live->config;

        kprintf("%*.*sALIST (%d/%d free blocks) {\n",
                tab, tab, "",
                live->meta->bm_alist_freeblks,
                live->meta->bm_alist_base_freeblks);
        hammer_alst_radix_print(live, live->meta + 1, 0,
                                bl->bl_radix, bl->bl_skip, tab + 4);
        kprintf("%*.*s}\n", tab, tab, "");
}

#endif

/************************************************************************
 *                        ALLOCATION SUPPORT FUNCTIONS                  *
 ************************************************************************
 *
 *      These support functions do all the actual work.  They may seem
 *      rather longish, but that's because I've commented them up.  The
 *      actual code is straightforward.
 *
 */

/*
 * hammer_alst_leaf_alloc_fwd()
 *
 *      Allocate at a leaf in the radix tree (a bitmap).
 *
 *      This is the core of the allocator and is optimized for the 1 block
 *      and the HAMMER_ALIST_BMAP_RADIX block allocation cases.  Other cases
 *      are somewhat slower.  The 1 block allocation case is log2 and
 *      extremely quick.
 */

static int32_t
hammer_alst_leaf_alloc_fwd(hammer_almeta_t scan, int32_t blk,
                           int count, int32_t atblk)
{
        u_int32_t orig = scan->bm_bitmap;
        int32_t saveblk = blk;

        /*
         * Optimize bitmap all-allocated case.  Also, count = 1
         * case assumes at least 1 bit is free in the bitmap, so
         * we have to take care of this case here.
         */
        if (orig == 0) {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

#if 0
        /*
         * Optimized code to allocate one bit out of the bitmap
         *
         * mask iterates e.g. 00001111 00000011 00000001
         *
         * mask starts at 00001111
         */
        if (count == 1) {
                u_int32_t mask;
                int j = HAMMER_ALIST_BMAP_RADIX/2;
                int r = 0;

                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX/2);

                while (j) {
                        if ((orig & mask) == 0) {
                            r += j;
                            orig >>= j;
                        }
                        j >>= 1;
                        mask >>= j;
                }
                scan->bm_bitmap &= ~(1 << r);
                return(blk + r);
        }
#endif

        /*
         * Non-optimized code to allocate N bits out of the bitmap.
         * The more bits, the faster the code runs.  It will run
         * the slowest allocating 2 bits, but since there aren't any
         * memory ops in the core loop (or shouldn't be, anyway),
         * you probably won't notice the difference.
         *
         * Similar to the blist case, the alist code also requires
         * allocations to be power-of-2 sized and aligned to the
         * size of the allocation, which simplifies the algorithm.
         */
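        /*
         * For example, with count == 4 (assuming a 32-bit bitmap) the
         * mask starts at 0x0000000F and shifts left 4 bits per
         * iteration, testing the aligned candidates blk, blk+4, ...,
         * blk+28 in turn.
         */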
        {
                int j;
                int n = HAMMER_ALIST_BMAP_RADIX - count;
                u_int32_t mask;

                mask = (u_int32_t)-1 >> n;

                for (j = 0; j <= n; j += count) {
                        if ((orig & mask) == mask && blk >= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask = mask << count;
                        blk += count;
                }
        }

        /*
         * We couldn't allocate count in this subtree, update bighint if
         * atblk didn't interfere with the hinting mechanism.
         */
        if (saveblk >= atblk)
                scan->bm_bighint = count - 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * This version allocates blocks in the reverse direction.
 */
static int32_t
hammer_alst_leaf_alloc_rev(hammer_almeta_t scan, int32_t blk,
                           int count, int32_t atblk)
{
        u_int32_t orig = scan->bm_bitmap;
        int32_t saveblk;

        /*
         * Optimize bitmap all-allocated case.  Also, count = 1
         * case assumes at least 1 bit is free in the bitmap, so
         * we have to take care of this case here.
         */
        if (orig == 0) {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

#if 0
        /*
         * Optimized code to allocate one bit out of the bitmap
         */
        if (count == 1) {
                u_int32_t mask;
                int j = HAMMER_ALIST_BMAP_RADIX/2;
                int r = HAMMER_ALIST_BMAP_RADIX - 1;

                mask = ~((u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX/2));

                while (j) {
                        if ((orig & mask) == 0) {
                            r -= j;
                            orig <<= j;
                        }
                        j >>= 1;
                        mask <<= j;
                }
                scan->bm_bitmap &= ~(1 << r);
                return(blk + r);
        }
#endif

        /*
         * Non-optimized code to allocate N bits out of the bitmap.
         * The more bits, the faster the code runs.  It will run
         * the slowest allocating 2 bits, but since there aren't any
         * memory ops in the core loop (or shouldn't be, anyway),
         * you probably won't notice the difference.
         *
         * Similar to the blist case, the alist code also requires
         * allocations to be power-of-2 sized and aligned to the
         * size of the allocation, which simplifies the algorithm.
         *
         * initial mask if count == 2:  1100....0000
         */
        {
                int j;
                int n = HAMMER_ALIST_BMAP_RADIX - count;
                u_int32_t mask;

                mask = ((u_int32_t)-1 >> n) << n;
                blk += n;
                saveblk = blk;

                for (j = n; j >= 0; j -= count) {
                        if ((orig & mask) == mask && blk <= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask = mask >> count;
                        blk -= count;
                }
        }

        /*
         * We couldn't allocate count in this subtree, update bighint if
         * atblk didn't interfere with it.
         */
        if (saveblk <= atblk)
                scan->bm_bighint = count - 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * hammer_alst_meta_alloc_fwd()
 *
 *      Allocate at a meta node in the radix tree.
 *
 *      Attempt to allocate at a meta node.  If we can't, we update
 *      bighint and return a failure.  Updating bighint optimizes future
 *      calls that hit this node.  We have to check for our collapse cases
 *      and we have a few optimizations strewn in as well.
 */
static int32_t
hammer_alst_meta_alloc_fwd(hammer_alist_t live, hammer_almeta_t scan,
                           int32_t blk, int32_t count,
                           int32_t radix, int skip, int32_t atblk
) {
        hammer_alist_config_t bl;
        u_int32_t mask;
        u_int32_t pmask;
        int32_t saveblk;
        int next_skip;
        int i;

        /*
         * ALL-ALLOCATED special case
         */
        if (scan->bm_bitmap == 0)  {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        /*
         * Radix now represents each bitmap entry for this meta node.  If
         * the number of blocks being allocated can be fully represented,
         * we allocate directly out of this meta node.
         *
         * Meta node bitmaps use 2 bits per block.
         *
         *      00      ALL-ALLOCATED - UNINITIALIZED
         *      01      PARTIALLY-FREE/PARTIALLY-ALLOCATED
         *      10      ALL-ALLOCATED - INITIALIZED
         *      11      ALL-FREE      - UNINITIALIZED
         */
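        /*
         * For example (assuming 32-bit bitmaps and a meta radix of 16):
         * count == 2 * radix gives n == 4 bits and an initial mask of
         * 0x0000000F, covering the first two children; each iteration
         * below then advances by nd2 == 2 children (2 * radix blocks).
         */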
        if (count >= radix) {
                int n = count / radix * 2;      /* number of bits */
                int nd2 = n / 2;
                int j;

                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - n);
                saveblk = blk;
                for (j = 0; j < (int)HAMMER_ALIST_META_RADIX; j += nd2) {
                        if ((scan->bm_bitmap & mask) == mask && blk >= atblk) {
                                /*
                                 * NOTE: Marked all-allocated/uninitialized
                                 * rather than all-allocated/initialized.
                                 * See the warning at the top of the file.
                                 */
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask <<= n;
                        blk += radix * nd2;
                }
                if (scan->bm_bighint >= count && saveblk >= atblk)
                        scan->bm_bighint = count >> 1;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * If the count is too big we couldn't allocate anything from a
         * recursion even if the sub-tree were entirely free.
         */
        saveblk = blk;
        if (count > radix)
                goto failed;

        /*
         * If not we have to recurse.
         */
        mask = 0x00000003;
        pmask = 0x00000001;

        if (skip == 1) {
                /*
                 * If skip is 1 we are a meta leaf node, which means there
                 * is another allocation layer we have to dive down into.
                 */
                for (i = 0; i < (int)HAMMER_ALIST_META_RADIX; ++i) {
                        /*
                         * If the next layer is completely free then call
                         * its init function to initialize it.
                         */
                        if ((scan->bm_bitmap & mask) == mask &&
                            blk + radix > atblk) {
                                if (bl->bl_radix_init(live->info, blk, radix, HAMMER_ASTATE_FREE) == 0) {
                                        /*
                                         * NOTE: Marked all-allocated/
                                         * uninitialized rather than
                                         * all-allocated/initialized.  See
                                         * the warning at the top of the
                                         * file.
                                         */
                                        scan->bm_bitmap &= ~mask;
                                        scan->bm_bitmap |= pmask;
                                }
                        }

                        /*
                         * If there may be some free blocks try to allocate
                         * out of the layer.  If the layer indicates that
                         * it is completely full then clear both bits to
                         * propagate the condition.
                         */
                        if ((scan->bm_bitmap & mask) == pmask &&
                            blk + radix > atblk) {
                                int32_t r;
                                int32_t full;

                                r = bl->bl_radix_alloc_fwd(live->info, blk,
                                                           radix, count, atblk,
                                                           &full);
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (full) {
                                                scan->bm_bitmap &= ~mask;
                                                scan->bm_bitmap |= pmask << 1;
                                        }
                                        return(r);
                                }
                        }
                        blk += radix;
                        mask <<= 2;
                        pmask <<= 2;
                }
        } else {
                /*
                 * Otherwise there are sub-records in the current a-list
                 * layer.  We can also peek into the sub-layers to get
                 * more accurate size hints.
                 */
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                for (i = 1; i < skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                /*
                                 * Terminator
                                 */
                                break;
                        }

                        /*
                         * Initialize bitmap if allocating from the all-free
                         * case.
                         */
                        if ((scan->bm_bitmap & mask) == mask) {
                                scan[i].bm_bitmap = (u_int32_t)-1;
                                scan[i].bm_bighint = radix;
                        }

                        if (count <= scan[i].bm_bighint &&
                            blk + radix > atblk) {
                                /*
                                 * count fits in object, recurse into the
                                 * next layer.  If the next_skip is 1 it
                                 * will be either a normal leaf or a meta
                                 * leaf.
                                 */
                                int32_t r;

                                if (next_skip == 1 && bl->bl_terminal) {
                                        r = hammer_alst_leaf_alloc_fwd(
                                                &scan[i], blk, count, atblk);
                                } else {
                                        r = hammer_alst_meta_alloc_fwd(
                                                live, &scan[i],
                                                blk, count,
                                                radix, next_skip, atblk);
                                }
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (scan[i].bm_bitmap == 0) {
                                                scan->bm_bitmap &= ~mask;
                                        } else {
                                                scan->bm_bitmap &= ~mask;
                                                scan->bm_bitmap |= pmask;
                                        }
                                        return(r);
                                }
                        }
                        blk += radix;
                        mask <<= 2;
                        pmask <<= 2;
                }
        }

failed:
        /*
         * We couldn't allocate count in this subtree, update bighint.
         */
        if (scan->bm_bighint >= count && saveblk >= atblk)
                scan->bm_bighint = count >> 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * This version allocates blocks in the reverse direction.
 *
 * This is really nasty code
 */
static int32_t
hammer_alst_meta_alloc_rev(hammer_alist_t live, hammer_almeta_t scan,
                           int32_t blk, int32_t count,
                           int32_t radix, int skip, int32_t atblk
) {
        hammer_alist_config_t bl;
        int i;
        int j;
        u_int32_t mask;
        u_int32_t pmask;
        int32_t saveblk;
        int next_skip;

        /*
         * ALL-ALLOCATED special case
         */
        if (scan->bm_bitmap == 0)  {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        /*
         * Radix now represents each bitmap entry for this meta node.  If
         * the number of blocks being allocated can be fully represented,
         * we allocate directly out of this meta node.
         *
         * Meta node bitmaps use 2 bits per block.
         *
         *      00      ALL-ALLOCATED (uninitialized)
         *      01      PARTIALLY-FREE/PARTIALLY-ALLOCATED
         *      10      ALL-ALLOCATED (initialized)
         *      11      ALL-FREE
         */
        if (count >= radix) {
                int n = count / radix * 2;      /* number of bits */
                int nd2 = n / 2;                /* number of bitmap entries */

                /*
                 * Initial mask if e.g. n == 2:  1100....0000
                 */
                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - n) <<
                        (HAMMER_ALIST_BMAP_RADIX - n);
                blk += (HAMMER_ALIST_META_RADIX - nd2) * radix;
                saveblk = blk;
                for (j = HAMMER_ALIST_META_RADIX - nd2; j >= 0; j -= nd2) {
                        if ((scan->bm_bitmap & mask) == mask && blk <= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask >>= n;
                        blk -= nd2 * radix;
                }
                if (scan->bm_bighint >= count && saveblk <= atblk)
                        scan->bm_bighint = count >> 1;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * NOTE: saveblk must represent the entire layer, not the base blk
         * of the last element.  Otherwise an atblk that is inside the
         * last element can cause bighint to be updated for a failed
         * allocation when we didn't actually test all available blocks.
         */

        if (skip == 1) {
                /*
                 * We need to recurse but we are at a meta node leaf, which
                 * means there is another layer under us.
                 */
                mask = 0xC0000000;
                pmask = 0x40000000;
                blk += radix * HAMMER_ALIST_META_RADIX;

                saveblk = blk;
                blk -= radix;

                for (i = 0; i < (int)HAMMER_ALIST_META_RADIX; ++i) {
                        /*
                         * If the next layer is completely free then call
                         * its init function to initialize it.  The init
                         * function is responsible for the initial freeing.
                         */
                        if ((scan->bm_bitmap & mask) == mask && blk <= atblk) {
                                if (bl->bl_radix_init(live->info, blk, radix, HAMMER_ASTATE_FREE) == 0) {
                                        scan->bm_bitmap &= ~mask;
                                        scan->bm_bitmap |= pmask;
                                }
                        }

                        /*
                         * If there may be some free blocks try to allocate
                         * out of the layer.  If the layer indicates that
                         * it is completely full then clear both bits to
                         * propagate the condition.
                         */
                        if ((scan->bm_bitmap & mask) == pmask && blk <= atblk) {
                                int32_t r;
                                int32_t full;

                                r = bl->bl_radix_alloc_rev(live->info, blk,
                                                           radix, count,
                                                           atblk, &full);
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (full) {
                                                scan->bm_bitmap &= ~mask;
                                                scan->bm_bitmap |= pmask << 1;
                                        }
                                        return(r);
                                }
                        }
                        mask >>= 2;
                        pmask >>= 2;
                        blk -= radix;
                }
        } else {
                /*
                 * Since we are going in the reverse direction we need an
                 * extra loop to determine if there is a terminator, then
                 * run backwards.
                 *
                 * This is a little weird but we do not want to overflow the
                 * mask/pmask in the loop.
                 */
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                j = 0;
                for (i = 1; i < skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                KKASSERT(j != 0);
                                break;
                        }
                        blk += radix;
                        j += 2;
                }

                saveblk = blk;
                blk -= radix;
                j -= 2;
                mask = 0x00000003 << j;
                pmask = 0x00000001 << j;
                i -= next_skip;

                while (i >= 1) {
                        /*
                         * Initialize the bitmap in the child if allocating
                         * from the all-free case.
                         */
                        if ((scan->bm_bitmap & mask) == mask) {
                                scan[i].bm_bitmap = (u_int32_t)-1;
                                scan[i].bm_bighint = radix;
                        }

                        /*
                         * Handle various count cases.  Bighint may be too
                         * large but is never too small.
                         */
                        if (count <= scan[i].bm_bighint && blk <= atblk) {
                                /*
                                 * count fits in object
                                 */
                                int32_t r;
                                if (next_skip == 1 && bl->bl_terminal) {
                                        r = hammer_alst_leaf_alloc_rev(
                                                &scan[i], blk, count, atblk);
                                } else {
                                        r = hammer_alst_meta_alloc_rev(
                                                live, &scan[i],
                                                blk, count,
                                                radix, next_skip, atblk);
                                }
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (scan[i].bm_bitmap == 0) {
                                                scan->bm_bitmap &= ~mask;
                                        } else {
                                                scan->bm_bitmap &= ~mask;
                                                scan->bm_bitmap |= pmask;
                                        }
                                        return(r);
                                }
                        }
                        blk -= radix;
                        mask >>= 2;
                        pmask >>= 2;
                        i -= next_skip;
                }
        }

        /*
         * We couldn't allocate count in this subtree, update bighint.
         * Since we are restricted to powers of 2, the next highest count
         * we might be able to allocate is (count >> 1).
         */
        if (scan->bm_bighint >= count && saveblk <= atblk)
                scan->bm_bighint = count >> 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * HAMMER_ALST_FIND()
 *
 * Locate the first allocated block greater or equal to atblk.
 */
static int32_t
hammer_alst_find(hammer_alist_t live, hammer_almeta_t scan, int32_t blk,
                 int32_t radix, int32_t skip, int32_t atblk)
{
        u_int32_t mask;
        u_int32_t pmask;
        int32_t next_skip;
        int32_t tmpblk;
        int i;
        int j;

        /*
         * Leaf node (currently hammer_alist_find() only works on terminal
         * a-lists, and that case is asserted in hammer_alist_find()).
         */
        if (skip == 1 && live->config->bl_terminal) {
                if (scan->bm_bitmap == (u_int32_t)-1)
                        return(HAMMER_ALIST_BLOCK_NONE);
                for (i = 0; i < (int)HAMMER_ALIST_BMAP_RADIX; ++i) {
                        if (blk + i < atblk)
                                continue;
                        if ((scan->bm_bitmap & (1 << i)) == 0)
                                return(blk + i);
                }
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * Meta
         */
        radix /= HAMMER_ALIST_META_RADIX;
        next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
        mask = 0x00000003;
        pmask = 0x00000001;
        for (j = 0, i = 1; j < HAMMER_ALIST_META_RADIX; (i += next_skip), ++j) {
                /*
                 * Check Terminator
                 */
                if (scan[i].bm_bighint == (int32_t)-1) {
                        break;
                }

                /*
                 * Recurse if this meta node might contain a desired block.
                 */
                if (blk + radix > atblk) {
                        if ((scan->bm_bitmap & mask) == 0) {
                                /*
                                 * 00 - all-allocated, uninitialized
                                 */
                                return(atblk < blk ? blk : atblk);
                        } else if ((scan->bm_bitmap & mask) == (pmask << 1)) {
                                /*
                                 * 10 - all-allocated, initialized
                                 */
                                return(atblk < blk ? blk : atblk);
                        } else if ((scan->bm_bitmap & mask) == mask) {
                                /*
                                 * 11 - all-free (skip)
                                 */
                        } else if (next_skip == 0) {
                                /*
                                 * Partially allocated but we have to recurse
                                 * into a stacked A-list.
                                 */
                                tmpblk = live->config->bl_radix_find(
                                                live->info, blk, radix, atblk);
                                if (tmpblk != HAMMER_ALIST_BLOCK_NONE)
                                        return(tmpblk);
                        } else if ((scan->bm_bitmap & mask) == pmask) {
                                /*
                                 * 01 - partially-allocated
                                 */
                                tmpblk = hammer_alst_find(live, &scan[i],
                                                          blk, radix,
                                                          next_skip, atblk);
                                if (tmpblk != HAMMER_ALIST_BLOCK_NONE)
                                        return(tmpblk);

                        }
                }
                mask <<= 2;
                pmask <<= 2;
                blk += radix;
        }
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * HAMMER_ALST_LEAF_FREE()
 *
 *      Free allocated blocks from leaf bitmap.  The allocation code is
 *      restricted to powers of 2, the freeing code is not.
 */
static void
hammer_alst_leaf_free(hammer_almeta_t scan, int32_t blk, int count) {
        /*
         * free some data in this bitmap
         *
         * e.g.
         *      0000111111111110000
         *          \_________/\__/
         *              v        n
         */
        int n = blk & (HAMMER_ALIST_BMAP_RADIX - 1);
        u_int32_t mask;

        mask = ((u_int32_t)-1 << n) &
            ((u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - count - n));
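
        /*
         * For example, n == 4 and count == 3 gives
         * mask = 0xfffffff0 & 0x0000007f = 0x00000070, i.e. the three
         * bits starting at bit 4 (assuming a 32-bit bitmap).
         */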

        if (scan->bm_bitmap & mask)
                panic("hammer_alst_radix_free: freeing free block");
        scan->bm_bitmap |= mask;

        /*
         * We could probably do a better job here.  We are required to make
         * bighint at least as large as the biggest contiguous block of
         * data.  If we just shoehorn it, a little extra overhead will
         * be incurred on the next allocation (but only that one typically).
         */
        scan->bm_bighint = HAMMER_ALIST_BMAP_RADIX;
}

/*
 * HAMMER_ALST_META_FREE()
 *
 *      Free allocated blocks from radix tree meta info.
 *
 *      This support routine frees a range of blocks from the bitmap.
 *      The range must be entirely enclosed by this radix node.  If a
 *      meta node, we break the range down recursively to free blocks
 *      in subnodes (which means that this code can free an arbitrary
 *      range whereas the allocation code cannot allocate an arbitrary
 *      range).
 */

static void
hammer_alst_meta_free(hammer_alist_t live, hammer_almeta_t scan,
                      int32_t freeBlk, int32_t count,
                      int32_t radix, int skip, int32_t blk
) {
        hammer_alist_config_t bl;
        int next_skip;
        u_int32_t mask;
        u_int32_t pmask;
        int i;

        /*
         * Break the free down into its components.  Because it is so easy
         * to implement, frees are not limited to power-of-2 sizes.
         *
         * Each block in a meta-node bitmap takes two bits.
         */
        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        i = (freeBlk - blk) / radix;
        blk += i * radix;
        mask = 0x00000003 << (i * 2);
        pmask = 0x00000001 << (i * 2);

        if (skip == 1) {
                /*
                 * Our meta node is a leaf node, which means it must recurse
                 * into another allocator.
                 */
                while (i < (int)HAMMER_ALIST_META_RADIX &&
                       blk < freeBlk + count) {
                        int32_t v;

                        v = blk + radix - freeBlk;
                        if (v > count)
                                v = count;

                        if (scan->bm_bighint == (int32_t)-1)
                                panic("hammer_alst_meta_free: freeing unexpected range");
                        KKASSERT((scan->bm_bitmap & mask) != mask);

                        if (freeBlk == blk && count >= radix) {
                                /*
                                 * Freeing an entire zone.  Only recurse if
                                 * the zone was initialized.  A 00 state means
                                 * that the zone is marked all-allocated,
                                 * but was never initialized.
                                 *
                                 * Then set the zone to the all-free state (11).
                                 */
                                int32_t empty;

                                if (scan->bm_bitmap & mask) {
                                        bl->bl_radix_free(live->info, blk, radix,
                                                          freeBlk - blk, v, &empty);
                                        KKASSERT(empty);
                                        bl->bl_radix_destroy(live->info, blk, radix);
                                }
                                scan->bm_bitmap |= mask;
                                scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                /* XXX bighint not being set properly */
                        } else {
                                /*
                                 * Recursion case, partial free.  If 00 the
                                 * zone is marked all-allocated but has never
                                 * been initialized, so we init it.
                                 */
                                int32_t empty;

                                if ((scan->bm_bitmap & mask) == 0)
                                        bl->bl_radix_init(live->info, blk, radix, HAMMER_ASTATE_ALLOC);
                                bl->bl_radix_free(live->info, blk, radix,
                                                  freeBlk - blk, v, &empty);
                                if (empty) {
                                        scan->bm_bitmap |= mask;
                                        scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                        bl->bl_radix_destroy(live->info, blk, radix);
                                        /* XXX bighint not being set properly */
                                } else {
                                        scan->bm_bitmap &= ~mask;
                                        scan->bm_bitmap |= pmask;
                                        if (scan->bm_bighint < radix / 2)
                                                scan->bm_bighint = radix / 2;
                                        /* XXX bighint not being set properly */
                                }
                        }
                        ++i;
                        mask <<= 2;
                        pmask <<= 2;
                        count -= v;
                        freeBlk += v;
                        blk += radix;
                }
        } else {
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                i = 1 + i * next_skip;

                while (i <= skip && blk < freeBlk + count) {
                        int32_t v;

                        KKASSERT(mask != 0);
                        KKASSERT(count != 0);

                        v = blk + radix - freeBlk;
                        if (v > count)
                                v = count;

                        if (scan->bm_bighint == (int32_t)-1)
                                panic("hammer_alst_meta_free: freeing unexpected range");

                        if (freeBlk == blk && count >= radix) {
                                /*
                                 * All-free case, no need to update sub-tree
                                 */
                                scan->bm_bitmap |= mask;
                                scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                /* XXX bighint not being set properly */
                        } else {
                                /*
                                 * Recursion case
                                 */
                                if (next_skip == 1 && bl->bl_terminal) {
                                        hammer_alst_leaf_free(&scan[i], freeBlk, v);
                                } else {
                                        hammer_alst_meta_free(live, &scan[i],
                                                              freeBlk, v,
                                                              radix, next_skip,
                                                              blk);
                                }
                                if (scan[i].bm_bitmap == (u_int32_t)-1) {
1401                                         scan->bm_bitmap |= mask;
1402                                         /* XXX bighint not set properly */
1403                                         scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
1404                                 } else {
1405                                         scan->bm_bitmap &= ~mask;
1406                                         scan->bm_bitmap |= pmask;
1407                                         /* XXX bighint not set properly */
1408                                         if (scan->bm_bighint < scan[i].bm_bighint)
1409                                                 scan->bm_bighint = scan[i].bm_bighint;
1410                                 }
1411                         }
1412                         mask <<= 2;
1413                         pmask <<= 2;
1414                         count -= v;
1415                         freeBlk += v;
1416                         blk += radix;
1417                         i += next_skip;
1418                 }
1419         }
1420 }
1421
1422 /*
1423  * hammer_alst_radix_init()
1424  *
1425  *      Initialize our meta structures and bitmaps and calculate the exact
1426  *      number of meta-nodes required to manage 'count' blocks.  
1427  *
1428  *      The required space may be truncated due to early termination records.
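 *
 *      A sketch of the skip geometry, assuming HAMMER_ALIST_META_RADIX
 *      is 16: a terminal node has skip == 1, the meta level directly
 *      above it has skip == 1 + 16 * 1 == 17, and the level above that
 *      has skip == 1 + 16 * 17 == 273, consistent with the
 *      next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX step below.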
1429  */
1430 static int32_t  
1431 hammer_alst_radix_init(hammer_almeta_t scan, int32_t radix,
1432                        int skip, int32_t count)
1433 {
1434         int i;
1435         int next_skip;
1436         int32_t memindex = 1;
1437         u_int32_t mask;
1438         u_int32_t pmask;
1439
1440         /*
1441          * Basic initialization of the almeta for meta or leaf nodes.  This
1442          * marks the element as all-allocated.
1443          */
1444         if (scan) {
1445                 scan->bm_bighint = 0;
1446                 scan->bm_bitmap = 0;
1447         }
1448
1449         /*
1450          * We are at a terminal node; we eat only one meta element.  If
1451          * live->config->bl_terminal is set this is a leaf node, otherwise
1452          * it is a meta node for a stacked A-list.  We do NOT recurse into
1453          * stacked A-lists but simply mark the entire stack as all-allocated
1454          * using code 00 (meaning all-allocated & uninitialized).
1455          */
1456         if (skip == 1)
1457                 return(memindex);
1458
1459         /*
1460          * Meta node.  If allocating the entire object we can special
1461          * case it.  However, we need to figure out how much memory
1462          * is required to manage 'count' blocks, so we continue on anyway.
1463          */
1464         radix /= HAMMER_ALIST_META_RADIX;
1465         next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
1466         mask = 0x00000003;
1467         pmask = 0x00000001;
1468
1469         for (i = 1; i < skip; i += next_skip) {
1470                 /*
1471                  * We eat up to this record
1472                  */
1473                 memindex = i;
1474
1475                 KKASSERT(mask != 0);
1476
1477                 if (count >= radix) {
1478                         /*
1479                          * Allocate the entire object
1480                          */
1481                         memindex += hammer_alst_radix_init(
1482                             ((scan) ? &scan[i] : NULL),
1483                             radix,
1484                             next_skip,
1485                             radix
1486                         );
1487                         count -= radix;
1488                         /* already marked as wholly allocated */
1489                 } else if (count > 0) {
1490                         /*
1491                          * Allocate a partial object
1492                          */
1493                         memindex += hammer_alst_radix_init(
1494                             ((scan) ? &scan[i] : NULL),
1495                             radix,
1496                             next_skip,
1497                             count
1498                         );
1499                         count = 0;
1500
1501                         /*
1502                          * Mark as partially allocated
1503                          */
1504                         if (scan) {
1505                                 scan->bm_bitmap &= ~mask;
1506                                 scan->bm_bitmap |= pmask;
1507                         }
1508                 } else {
1509                         /*
1510                          * Add terminator and break out.  The terminal
1511                          * eats the meta node at scan[i].
1512                          */
1513                         ++memindex;
1514                         if (scan)
1515                                 scan[i].bm_bighint = (int32_t)-1;
1516                         /* already marked as wholly allocated */
1517                         break;
1518                 }
1519                 mask <<= 2;
1520                 pmask <<= 2;
1521         }
1522         return(memindex);
1523 }
1524
1525 /*
1526  * hammer_alst_radix_recover()
1527  *
1528  *      This code is basically a duplicate of hammer_alst_radix_init()
1529  *      except that it recovers the a-list instead of initializing it.
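 *
 *      The public entry point is hammer_alist_recover(); the debug
 *      harness below, for example, calls
 *
 *          hammer_alist_recover(live, 0, 0,
 *                               live->meta->bm_alist_base_freeblks);
 *
 *      and reports the returned count of free blocks (negative on error).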
1530  */
1531 static void     
1532 hammer_alst_radix_recover(hammer_alist_recover_t info, hammer_almeta_t scan,
1533                           int32_t blk, int32_t radix, int skip, int32_t count)
1534 {
1535         hammer_alist_t live = info->live;
1536         u_int32_t mask;
1537         u_int32_t pmask;
1538         int next_skip;
1539         int i;
1540         int j;
1541         int n;
1542
1543         /*
1544          * Don't try to recover bighint; just set it to its maximum
1545          * value and let the A-list allocations reoptimize it.  XXX
1546          */
1547         scan->bm_bighint = radix;
1548
1549         /*
1550          * If we are at a terminal node (i.e. not stacked on top of another
1551          * A-list), just count the free blocks.
1552          */
1553         if (skip == 1 && live->config->bl_terminal) {
1554                 for (i = 0; i < (int)HAMMER_ALIST_BMAP_RADIX; ++i) {
1555                         if (scan->bm_bitmap & (1 << i))
1556                                 ++info->live->meta->bm_alist_freeblks;
1557                 }
1558                 return;
1559         }
1560
1561         /*
1562          * Recursive meta node (next_skip != 0) or terminal meta
1563          * node (next_skip == 0).
1564          */
1565         radix /= HAMMER_ALIST_META_RADIX;
1566         next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
1567         mask = 0x00000003;
1568         pmask = 0x00000001;
1569
1570         for (i = 1, j = 0; j < (int)HAMMER_ALIST_META_RADIX;
1571               ++j, (i += next_skip)) {
1572                 /*
1573                  * Check mask:
1574                  *
1575                  *      00      ALL-ALLOCATED - UNINITIALIZED
1576                  *      01      PARTIALLY-FREE/PARTIALLY-ALLOCATED
1577                  *      10      ALL-ALLOCATED - INITIALIZED
1578                  *      11      ALL-FREE      - UNINITIALIZED
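                 *
                 *      e.g. (assuming HAMMER_ALIST_META_RADIX is 16) a meta
                 *      bitmap of 0xFFFFFF01 encodes zone 0 as 01 (partial),
                 *      zones 1-3 as 00 (all-allocated, uninitialized) and
                 *      zones 4-15 as 11 (all-free).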
1579                  */
1580                 KKASSERT(mask);
1581                 if (count >= radix) {
1582                         /*
1583                          * Recover the entire object
1584                          */
1585                         if ((scan->bm_bitmap & mask) == 0) {
1586                                 /*
1587                                  * All-allocated (uninited), do nothing
1588                                  */
1589                         } else if ((scan->bm_bitmap & mask) == mask) {
1590                                 /*
1591                                  * All-free (uninited), do nothing
1592                                  */
1593                                 live->meta->bm_alist_freeblks += radix;
1594                         } else if (next_skip) {
1595                                 /*
1596                                  * Normal meta node, initialized.  Recover and
1597                                  * adjust to either an all-allocated (inited)
1598                                  * or partially-allocated state.
1599                                  */
1600                                 hammer_alst_radix_recover(
1601                                     info,
1602                                     &scan[i],
1603                                     blk,
1604                                     radix,
1605                                     next_skip,
1606                                     radix
1607                                 );
1608                                 if (scan[i].bm_bitmap == 0) {
1609                                         scan->bm_bitmap =
1610                                             (scan->bm_bitmap & ~mask) |
1611                                             (pmask << 1);
1612                                 } else if (scan[i].bm_bitmap == (u_int32_t)-1) {
1613                                         scan->bm_bitmap |= mask;
1614                                 } else {
1615                                         scan->bm_bitmap =
1616                                             (scan->bm_bitmap & ~mask) | pmask;
1617                                 }
1618                         } else {
1619                                 /*
1620                                  * Stacked meta node, recurse.
1621                                  */
1622                                 n = live->config->bl_radix_recover(
1623                                             live->info,
1624                                             blk, radix, radix);
1625                                 if (n >= 0) {
1626                                         live->meta->bm_alist_freeblks += n;
1627                                         if (n == 0) {
1628                                                 scan->bm_bitmap =
1629                                                     (scan->bm_bitmap & ~mask) |
1630                                                     (pmask << 1);
1631                                         } else if (n == radix) {
1632                                                 scan->bm_bitmap |= mask;
1633                                         } else {
1634                                                 scan->bm_bitmap =
1635                                                     (scan->bm_bitmap & ~mask) |
1636                                                     pmask;
1637                                         }
1638                                 } else {
1639                                         info->error = n;
1640                                 }
1641                         }
1642                         count -= radix;
1643                 } else if (count > 0) {
1644                         /*
1645                          * Recover a partial object.  The object can become
1646                          * wholly allocated but never wholly free.
1647                          */
1648                         if (next_skip) {
1649                                 hammer_alst_radix_recover(
1650                                     info,
1651                                     &scan[i],
1652                                     blk,
1653                                     radix,
1654                                     next_skip,
1655                                     count
1656                                 );
1657                                 if (scan[i].bm_bitmap == 0) {
1658                                         scan->bm_bitmap =
1659                                             (scan->bm_bitmap & ~mask) |
1660                                             (pmask << 1);
1661                                 } else {
1662                                         scan->bm_bitmap =
1663                                             (scan->bm_bitmap & ~mask) | pmask;
1664                                 }
1665                         } else {
1666                                 n = live->config->bl_radix_recover(
1667                                             live->info,
1668                                             blk, radix, count);
1669                                 if (n >= 0) {
1670                                         live->meta->bm_alist_freeblks += n;
1671                                         if (n == 0) {
1672                                                 scan->bm_bitmap =
1673                                                     (scan->bm_bitmap & ~mask) |
1674                                                     (pmask << 1);
1675                                         } else {
1676                                                 scan->bm_bitmap =
1677                                                     (scan->bm_bitmap & ~mask) |
1678                                                     pmask;
1679                                         }
1680                                 } else {
1681                                         info->error = n;
1682                                 }
1683                         }
1684                         count = 0;
1685                 } else if (next_skip) {
1686                         /*
1687                          * Add terminator.  The terminator eats the meta
1688                          * node at scan[i].  There is only ONE terminator;
1689                          * make sure we don't write out any more (set count to
1690                          * -1) or we may overflow our allocation.
1691                          */
1692                         if (count == 0) {
1693                                 scan[i].bm_bighint = (int32_t)-1;
1694                                 count = -1;
1695                         }
1696                         scan->bm_bitmap &= ~mask;       /* all-allocated/uni */
1697                 } else {
1698                         scan->bm_bitmap &= ~mask;       /* all-allocated/uni */
1699                 }
1700                 mask <<= 2;
1701                 pmask <<= 2;
1702                 blk += radix;
1703         }
1704 }
1705
1706 #ifdef ALIST_DEBUG
1707
1708 static void     
1709 hammer_alst_radix_print(hammer_alist_t live, hammer_almeta_t scan,
1710                         int32_t blk, int32_t radix, int skip, int tab)
1711 {
1712         int i;
1713         int next_skip;
1714         int lastState = 0;
1715         u_int32_t mask;
1716
1717         if (skip == 1 && live->config->bl_terminal) {
1718                 kprintf(
1719                     "%*.*s(%04x,%d): bitmap %08x big=%d\n", 
1720                     tab, tab, "",
1721                     blk, radix,
1722                     scan->bm_bitmap,
1723                     scan->bm_bighint
1724                 );
1725                 return;
1726         }
1727
1728         if (scan->bm_bitmap == 0) {
1729                 kprintf(
1730                     "%*.*s(%04x,%d) ALL ALLOCATED\n",
1731                     tab, tab, "",
1732                     blk,
1733                     radix
1734                 );
1735                 return;
1736         }
1737         if (scan->bm_bitmap == (u_int32_t)-1) {
1738                 kprintf(
1739                     "%*.*s(%04x,%d) ALL FREE\n",
1740                     tab, tab, "",
1741                     blk,
1742                     radix
1743                 );
1744                 return;
1745         }
1746
1747         kprintf(
1748             "%*.*s(%04x,%d): %s (%d) bitmap=%08x big=%d {\n",
1749             tab, tab, "",
1750             blk, radix,
1751             (skip == 1 ? "LAYER" : "subtree"),
1752             radix,
1753             scan->bm_bitmap,
1754             scan->bm_bighint
1755         );
1756
1757         radix /= HAMMER_ALIST_META_RADIX;
1758         tab += 4;
1759         mask = 0x00000003;
1760
1761         if (skip == 1) {
1762                 for (i = 0; i < HAMMER_ALIST_META_RADIX; ++i) {
1763                         if ((scan->bm_bitmap & mask) == mask) {
1764                                 kprintf(
1765                                     "%*.*s(%04x,%d): ALL FREE\n",
1766                                     tab, tab, "",
1767                                     blk, radix
1768                                 );
1769                         } else if ((scan->bm_bitmap & mask) == 0) {
1770                                 kprintf(
1771                                     "%*.*s(%04x,%d): ALL ALLOCATED\n",
1772                                     tab, tab, "",
1773                                     blk, radix
1774                                 );
1775                         } else {
1776                                 live->config->bl_radix_print(
1777                                                 live->info, blk, radix, tab);
1778                         }
1779                         blk += radix;
1780                         mask <<= 2;
1781                 }
1782         } else {
1783                 next_skip = ((u_int)(skip - 1) / HAMMER_ALIST_META_RADIX);
1784
1785                 for (i = 1; i < skip; i += next_skip) {
1786                         KKASSERT(mask != 0);
1787                         if (scan[i].bm_bighint == (int32_t)-1) {
1788                                 kprintf(
1789                                     "%*.*s(%04x,%d): Terminator\n",
1790                                     tab, tab, "",
1791                                     blk, radix
1792                                 );
1793                                 lastState = 0;
1794                                 break;
1795                         }
1796                         if ((scan->bm_bitmap & mask) == mask) {
1797                                 kprintf(
1798                                     "%*.*s(%04x,%d): ALL FREE\n",
1799                                     tab, tab, "",
1800                                     blk, radix
1801                                 );
1802                         } else if ((scan->bm_bitmap & mask) == 0) {
1803                                 kprintf(
1804                                     "%*.*s(%04x,%d): ALL ALLOCATED\n",
1805                                     tab, tab, "",
1806                                     blk, radix
1807                                 );
1808                         } else {
1809                                 hammer_alst_radix_print(
1810                                     live,
1811                                     &scan[i],
1812                                     blk,
1813                                     radix,
1814                                     next_skip,
1815                                     tab
1816                                 );
1817                         }
1818                         blk += radix;
1819                         mask <<= 2;
1820                 }
1821         }
1822         tab -= 4;
1823
1824         kprintf(
1825             "%*.*s}\n",
1826             tab, tab, ""
1827         );
1828 }
1829
1830 #endif
1831
1832 #ifdef ALIST_DEBUG
1833
1834 static struct hammer_alist_live **layers;       /* initialized by main */
1835 static int32_t layer_radix = -1;
1836
1837 /*
1838  * Initialize a zone.
1839  *
1840  * If called for an allocation this init transitions the zone out of an
1841  * all-free state.  Allocate the zone and mark the whole mess as free so
1842  * the caller can then allocate out of this zone.
1843  *
1844  * If called for a free this init transitions the zone out of its initial
1845  * all-allocated (00) state.  Allocate the zone but leave the whole mess
1846  * all-allocated.  The caller will then free the appropriate range.
1847  */
1848 static
1849 int
1850 debug_radix_init(void *info, int32_t blk, int32_t radix,
1851                  enum hammer_alloc_state state)
1852 {
1853         hammer_alist_t layer;
1854         int layer_no = blk / layer_radix;
1855
1856         printf("lower layer: init (%04x,%d) layer_radix=%d\n",
1857                blk, radix, layer_radix);
1858         KKASSERT(layer_radix == radix);
1859         KKASSERT(layers[layer_no] == NULL);
1860         layer = layers[layer_no] = hammer_alist_create(radix, 1, NULL, state); 
1861         return(0);
1862 }
1863
1864 static
1865 int32_t
1866 debug_radix_recover(void *info, int32_t blk, int32_t radix, int32_t count)
1867 {
1868         hammer_alist_t layer;
1869         int layer_no = blk / layer_radix;
1870         int32_t n;
1871
1872         KKASSERT(layer_radix == radix);
1873         KKASSERT(layers[layer_no] != NULL);
1874         layer = layers[layer_no];
1875         n = hammer_alist_recover(layer, blk, 0, count);
1876         printf("Recover layer %d blk %d result %d/%d\n",
1877                 layer_no, blk, n, count);
1878         return(n);
1879 }
1880
1881 static
1882 int32_t
1883 debug_radix_find(void *info, int32_t blk, int32_t radix, int32_t atblk)
1884 {
1885         hammer_alist_t layer;
1886         int layer_no = blk / layer_radix;
1887         int32_t res;
1888
1889         KKASSERT(layer_radix == radix);
1890         KKASSERT(layers[layer_no] != NULL);
1891         layer = layers[layer_no];
1892         res = hammer_alist_find(layer, atblk - blk);
1893         if (res != HAMMER_ALIST_BLOCK_NONE)
1894                 res += blk;
1895         return(res);
1896 }
1897
1898 /*
1899  * This is called when a zone becomes entirely free, typically after a
1900  * call to debug_radix_free() has indicated that the entire zone is now
1901  * free.
1902  */
1903 static
1904 int
1905 debug_radix_destroy(void *info, int32_t blk, int32_t radix)
1906 {
1907         hammer_alist_t layer;
1908         int layer_no = blk / layer_radix;
1909
1910         printf("lower layer: destroy (%04x,%d)\n", blk, radix);
1911         layer = layers[layer_no];
1912         KKASSERT(layer != NULL);
1913         hammer_alist_destroy(layer, NULL);
1914         layers[layer_no] = NULL;
1915         return(0);
1916 }
1917
1918
1919 static
1920 int32_t
1921 debug_radix_alloc_fwd(void *info, int32_t blk, int32_t radix,
1922                       int32_t count, int32_t atblk, int32_t *fullp)
1923 {
1924         hammer_alist_t layer = layers[blk / layer_radix];
1925         int32_t r;
1926
1927         r = hammer_alist_alloc_fwd(layer, count, atblk - blk);
1928         *fullp = hammer_alist_isfull(layer);
1929         if (r != HAMMER_ALIST_BLOCK_NONE)
1930                 r += blk;
1931         return(r);
1932 }
1933
1934 static
1935 int32_t
1936 debug_radix_alloc_rev(void *info, int32_t blk, int32_t radix,
1937                       int32_t count, int32_t atblk, int32_t *fullp)
1938 {
1939         hammer_alist_t layer = layers[blk / layer_radix];
1940         int32_t r;
1941
1942         r = hammer_alist_alloc_rev(layer, count, atblk - blk);
1943         *fullp = hammer_alist_isfull(layer);
1944         if (r != HAMMER_ALIST_BLOCK_NONE)
1945                 r += blk;
1946         return(r);
1947 }
1948
1949 static
1950 void
1951 debug_radix_free(void *info, int32_t blk, int32_t radix,
1952                  int32_t base_blk, int32_t count, int32_t *emptyp)
1953 {
1954         int layer_no = blk / layer_radix;
1955         hammer_alist_t layer = layers[layer_no];
1956
1957         KKASSERT(layer);
1958         hammer_alist_free(layer, base_blk, count);
1959         *emptyp = hammer_alist_isempty(layer);
1960 }
1961
1962 static
1963 void
1964 debug_radix_print(void *info, int32_t blk, int32_t radix, int tab)
1965 {
1966         hammer_alist_t layer = layers[blk / layer_radix];
1967
1968         hammer_alist_print(layer, tab);
1969 }
1970
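/*
 * Interactive test harness.  A hypothetical session (the binary name and
 * exact output are illustrative only) might look like:
 *
 *      $ ./hammer_alist 1024 64
 *      1024/1024> a 16
 *          R=0000
 *      1008/1024> f 0 16
 *      a-list is now 100% empty
 *
 * The two positional arguments are the total block count and the
 * second-layer radix; both must be powers of 2.
 */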
1971 int
1972 main(int ac, char **av)
1973 {
1974         int32_t size = -1;
1975         int i;
1976         hammer_alist_t live;
1977         hammer_almeta_t meta = NULL;
1978
1979         for (i = 1; i < ac; ++i) {
1980                 const char *ptr = av[i];
1981                 if (*ptr != '-') {
1982                         if (size == -1)
1983                                 size = strtol(ptr, NULL, 0);
1984                         else if (layer_radix == -1)
1985                                 layer_radix = strtol(ptr, NULL, 0);
1986                         else
1987                                 ;       /* extra arguments are ignored */
1988                         continue;
1989                 }
1990                 ptr += 2;
1991                 fprintf(stderr, "Bad option: %s\n", ptr - 2);
1992                 exit(1);
1993         }
1994         if (size == -1)
1995                 size = 1024;
1996         if (layer_radix == -1)
1997                 layer_radix = 1;        /* no second storage layer */
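        /*
         * Power-of-2 test: when size == 2^n, size ^ (size - 1) yields a
         * mask of the top bit and every bit below it, which is exactly
         * (size << 1) - 1; any other value fails the comparison.
         */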
1998         if ((size ^ (size - 1)) != (size << 1) - 1) {
1999                 fprintf(stderr, "size must be a power of 2\n");
2000                 exit(1);
2001         }
2002         if ((layer_radix ^ (layer_radix - 1)) != (layer_radix << 1) - 1) {
2003                 fprintf(stderr, "the second layer radix must be a power of 2\n");
2004                 exit(1);
2005         }
2006
2007         live = hammer_alist_create(size, layer_radix, NULL,
2008                                    HAMMER_ASTATE_ALLOC);
2009         layers = calloc(size, sizeof(hammer_alist_t));
2010
2011         printf("A-LIST TEST %d blocks, first-layer radix %d, "
2012                "second-layer radix %d\n",
2013                 size, live->config->bl_radix / layer_radix, layer_radix);
2014
2015         live->config->bl_radix_init = debug_radix_init;
2016         live->config->bl_radix_recover = debug_radix_recover;
2017         live->config->bl_radix_find = debug_radix_find;
2018         live->config->bl_radix_destroy = debug_radix_destroy;
2019         live->config->bl_radix_alloc_fwd = debug_radix_alloc_fwd;
2020         live->config->bl_radix_alloc_rev = debug_radix_alloc_rev;
2021         live->config->bl_radix_free = debug_radix_free;
2022         live->config->bl_radix_print = debug_radix_print;
2023
2024         hammer_alist_free(live, 0, size);
2025
2026         for (;;) {
2027                 char buf[1024];
2028                 int32_t da = 0;
2029                 int32_t count = 0;
2030                 int32_t atblk;
2031                 int32_t blk;
2032
2033                 kprintf("%d/%d> ",
2034                         live->meta->bm_alist_freeblks, size);
2035                 fflush(stdout);
2036                 if (fgets(buf, sizeof(buf), stdin) == NULL)
2037                         break;
2038                 switch(buf[0]) {
2039                 case 'p':
2040                         hammer_alist_print(live, 0);
2041                         atblk = 0;
2042                         kprintf("allocated: ");
2043                         while ((atblk = hammer_alist_find(live, atblk)) != HAMMER_ALIST_BLOCK_NONE) {
2044                                 kprintf(" %d", atblk);
2045                                 ++atblk;
2046                         }
2047                         kprintf("\n");
2048                         break;
2049                 case 'a':
2050                         atblk = 0;
2051                         if (sscanf(buf + 1, "%d %d", &count, &atblk) >= 1) {
2052                                 blk = hammer_alist_alloc_fwd(live, count, atblk);
2053                                 kprintf("    R=%04x\n", blk);
2054                         } else {
2055                                 kprintf("?\n");
2056                         }
2057                         break;
2058                 case 'r':
2059                         atblk = HAMMER_ALIST_BLOCK_MAX;
2060                         if (sscanf(buf + 1, "%d %d", &count, &atblk) >= 1) {
2061                                 blk = hammer_alist_alloc_rev(live, count, atblk);
2062                                 kprintf("    R=%04x\n", blk);
2063                         } else {
2064                                 kprintf("?\n");
2065                         }
2066                         break;
2067                 case 'f':
2068                         if (sscanf(buf + 1, "%x %d", &da, &count) == 2) {
2069                                 hammer_alist_free(live, da, count);
2070                                 if (hammer_alist_isempty(live))
2071                                         kprintf("a-list is now 100%% empty\n");
2072                         } else {
2073                                 kprintf("?\n");
2074                         }
2075                         break;
2076                 case 'R':
2077                         {
2078                                 int n;
2079
2080                                 n = hammer_alist_recover(live, 0, 0,
2081                                            live->meta->bm_alist_base_freeblks);
2082                                 if (n < 0)
2083                                         kprintf("recover: error %d\n", -n);
2084                                 else
2085                                         kprintf("recover: %d free\n", n);
2086                         }
2087                         break;
2088                 case '?':
2089                 case 'h':
2090                         puts(
2091                             "p          -print\n"
2092                             "a %d       -allocate\n"
2093                             "r %d       -allocate reverse\n"
2094                             "f %x %d    -free\n"
2095                             "R          -recover a-list\n"
2096                             "h/?        -help"
2097                         );
2098                         break;
2099                 default:
2100                         kprintf("?\n");
2101                         break;
2102                 }
2103         }
2104         return(0);
2105 }
2106
2107 void
2108 panic(const char *ctl, ...)
2109 {
2110         __va_list va;
2111
2112         __va_start(va, ctl);
2113         vfprintf(stderr, ctl, va);
2114         fprintf(stderr, "\n");
2115         __va_end(va);
2116         exit(1);
2117 }
2118
2119 #endif
2120