HAMMER 16/many - Recovery infrastructure, misc bug fixes
sys/vfs/hammer/hammer_alist.c
/*
 * HAMMER_ALIST.C -
 *
 * Bitmap allocator/deallocator, using a radix tree with hinting.
 * Unlimited-size allocations, power-of-2 only, power-of-2 aligned results
 * only.  This module has been separated from the generic kernel module and
 * written specifically for embedding in HAMMER storage structures.
 *
 * Copyright (c) 2007 The DragonFly Project.  All rights reserved.
 *
 * This code is derived from software contributed to The DragonFly Project
 * by Matthew Dillon <dillon@backplane.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name of The DragonFly Project nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific, prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $DragonFly: src/sys/vfs/hammer/Attic/hammer_alist.c,v 1.7 2008/01/09 00:46:22 dillon Exp $
 */
/*
 * This module implements a generic allocator through the use of a hinted
 * radix tree.  All allocations must be in powers of 2 and will return
 * similarly aligned results.  The radix tree typically recurses within
 * a memory buffer and then continues its recursion by chaining to other
 * memory buffers, creating a seamless whole capable of managing any amount
 * of storage.
 *
 * The radix tree is laid out recursively using a linear array.  Each meta
 * node is immediately followed (laid out sequentially in memory) by
 * HAMMER_ALIST_META_RADIX lower level nodes.  This is a recursive structure
 * but one that can be easily scanned through a very simple 'skip'
 * calculation.
 *
 * The radix tree supports an early-termination optimization which
 * effectively allows us to efficiently mix large and small allocations
 * with a single abstraction.  The address space can be partitioned
 * arbitrarily without adding much in the way of additional meta-storage
 * for the allocator.
 *
 * The radix tree supports allocator layering.  By supplying a base_radix > 1
 * the allocator will issue callbacks to recurse into lower layers once
 * higher layers have been exhausted.
 *
 * ALLOCATIONS IN MULTIPLES OF base_radix WILL BE ENTIRELY RETAINED IN THE
 * HIGHER LEVEL ALLOCATOR AND NEVER RECURSE.  This means the init function
 * will never be called and the A-list will consider the underlying zone
 * as being uninitialized.  If you then do a partial free, the A-list will
 * call the init function before freeing.  Most users of this API, including
 * HAMMER, only allocate and free whole zones, or only allocate and free
 * partial zones, and never mix their metaphors.
 *
 * This code can be compiled stand-alone for debugging.
 */

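/*
 * Illustrative layout sketch (assuming HAMMER_ALIST_BMAP_RADIX == 32 and
 * HAMMER_ALIST_META_RADIX == 16): a terminal two-meta-level tree is laid
 * out linearly as
 *
 *      meta[0]                          radix=8192  skip=273
 *          meta[1]                      radix=512   skip=17
 *              leaf[2] ... leaf[17]     radix=32    skip=1
 *          meta[18]
 *              leaf[19] ... leaf[34]
 *          ...
 *
 * A node's children start immediately after it, so a scan steps to the
 * next sibling with the precomputed 'skip' and to the first child with
 * next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX.
 */
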
#ifdef _KERNEL

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/lock.h>
#include <sys/kernel.h>
#include <sys/malloc.h>
#include <vm/vm.h>
#include <vm/vm_object.h>
#include <vm/vm_kern.h>
#include <vm/vm_extern.h>
#include <vm/vm_page.h>

#include "hammer_alist.h"
#include "hammer_disk.h"

#else

#ifndef ALIST_NO_DEBUG
#define ALIST_DEBUG
#endif

#include <sys/types.h>
#include <stdio.h>
#include <assert.h>
#include <string.h>
#include <stdlib.h>
#include <stdarg.h>

#define kmalloc(a,b,c)  malloc(a)
#define kfree(a,b)      free(a)
#define kprintf         printf
#define KKASSERT(exp)   assert(exp)
struct malloc_type;

#include "hammer_alist.h"

void panic(const char *ctl, ...);

#endif

/*
 * static support functions
 */
static int32_t hammer_alst_leaf_alloc_fwd(hammer_almeta_t scan,
                                        int32_t blk, int count, int32_t atblk);
static int32_t hammer_alst_meta_alloc_fwd(hammer_alist_t live,
                                        hammer_almeta_t scan,
                                        int32_t blk, int32_t count,
                                        int32_t radix, int skip, int32_t atblk);
static int32_t hammer_alst_leaf_alloc_rev(hammer_almeta_t scan,
                                        int32_t blk, int count, int32_t atblk);
static int32_t hammer_alst_meta_alloc_rev(hammer_alist_t live,
                                        hammer_almeta_t scan,
                                        int32_t blk, int32_t count,
                                        int32_t radix, int skip, int32_t atblk);
static int32_t hammer_alst_find(hammer_alist_t live, hammer_almeta_t scan,
                                        int32_t blk, int32_t radix,
                                        int32_t skip, int32_t atblk);
static void hammer_alst_leaf_free(hammer_almeta_t scan, int32_t relblk,
                                        int count);
static void hammer_alst_meta_free(hammer_alist_t live, hammer_almeta_t scan,
                                        int32_t freeBlk, int32_t count,
                                        int32_t radix, int skip, int32_t blk);
static int32_t  hammer_alst_radix_init(hammer_almeta_t scan,
                                        int32_t radix, int skip, int32_t count);
static void     hammer_alst_radix_recover(hammer_alist_recover_t info,
                                        hammer_almeta_t scan, int32_t blk,
                                        int32_t radix, int skip, int32_t count);
#ifdef ALIST_DEBUG
static void     hammer_alst_radix_print(hammer_alist_t live,
                                        hammer_almeta_t scan, int32_t blk,
                                        int32_t radix, int skip, int tab);
#endif

/*
 * Initialize an a-list config structure for use.  The config structure
 * describes the basic structure of an a-list's topology and may be
 * shared by any number of a-lists which have the same topology.
 *
 * blocks is the total number of blocks, that is the number of blocks
 * handled at this layer multiplied by the base radix.
 *
 * When base_radix != 1 the A-list has only meta elements and does not have
 * any leaves, in order to be able to track partial allocations.
 */
void
hammer_alist_template(hammer_alist_config_t bl, int32_t blocks,
                      int32_t base_radix, int32_t maxmeta)
{
        int radix;
        int skip;

        /*
         * Calculate radix and skip field used for scanning.  The leaf nodes
         * in our tree are either BMAP or META nodes depending on whether
         * we chain to a lower level allocation layer or not.
         */
        if (base_radix == 1)
                radix = HAMMER_ALIST_BMAP_RADIX;
        else
                radix = HAMMER_ALIST_META_RADIX;
        skip = 1;

        while (radix < blocks / base_radix) {
                radix *= HAMMER_ALIST_META_RADIX;
                skip = skip * HAMMER_ALIST_META_RADIX + 1;
        }

        /*
         * Increase the radix based on the number of blocks a lower level
         * allocator is able to handle at the 'base' of our allocator.
         * If base_radix != 1 the caller will have to initialize the callback
         * fields to implement the lower level allocator.
         */
        KKASSERT((int64_t)radix * (int64_t)base_radix < 0x80000000LL);
        radix *= base_radix;

        bzero(bl, sizeof(*bl));

        bl->bl_blocks = blocks;
        bl->bl_base_radix = base_radix;
        bl->bl_radix = radix;
        bl->bl_skip = skip;
        bl->bl_rootblks = hammer_alst_radix_init(NULL, bl->bl_radix,
                                                 bl->bl_skip, blocks);
        ++bl->bl_rootblks;      /* one more for freeblks header */
        if (base_radix == 1)
                bl->bl_terminal = 1;
        KKASSERT(maxmeta == 0 || bl->bl_rootblks <= maxmeta);

#if defined(ALIST_DEBUG)
        kprintf(
                "PRIMARY ALIST LAYER manages %d blocks"
                ", requiring %dK (%d bytes) of ram\n",
                bl->bl_blocks / bl->bl_base_radix,
                (bl->bl_rootblks * sizeof(struct hammer_almeta) + 1023) / 1024,
                (bl->bl_rootblks * sizeof(struct hammer_almeta))
        );
        kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
}

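/*
 * Worked example (a sketch, assuming HAMMER_ALIST_BMAP_RADIX == 32 and
 * HAMMER_ALIST_META_RADIX == 16): hammer_alist_template(bl, 65536, 1, 0)
 * iterates radix 32 -> 512 -> 8192 -> 131072 and skip 1 -> 17 -> 273 ->
 * 4369 above, leaving bl_radix = 131072, bl_skip = 4369 and bl_terminal
 * set because base_radix == 1.
 */
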
/*
 * Initialize a new A-list
 */
void
hammer_alist_init(hammer_alist_t live, int32_t start, int32_t count,
                  enum hammer_alloc_state state)
{
        hammer_alist_config_t bl = live->config;

        /*
         * Note: base_freeblks is a count, not a block number limit.
         */
        live->meta->bm_alist_freeblks = 0;
        live->meta->bm_alist_base_freeblks = count;
        hammer_alst_radix_init(live->meta + 1, bl->bl_radix,
                               bl->bl_skip, bl->bl_blocks);
        if (count && state == HAMMER_ASTATE_FREE)
                hammer_alist_free(live, start, count);
}

#if !defined(_KERNEL) && defined(ALIST_DEBUG)

/*
 * hammer_alist_create()        (userland only)
 *
 *      Create an alist capable of handling up to the specified number of
 *      blocks.  blocks must be greater than 0.
 *
 *      The smallest alist consists of a single leaf node capable of
 *      managing HAMMER_ALIST_BMAP_RADIX blocks, or (if base_radix != 1)
 *      a single meta node capable of managing HAMMER_ALIST_META_RADIX
 *      blocks which recurses into other storage layers for a total
 *      handling capability of (HAMMER_ALIST_META_RADIX * base_radix) blocks.
 *
 *      Larger a-lists increase their capability exponentially by
 *      HAMMER_ALIST_META_RADIX.
 *
 *      The block count is the total number of blocks inclusive of any
 *      layering.  blocks can be less than base_radix and will result in
 *      a radix tree with a single leaf node which then chains down.
 */
hammer_alist_t
hammer_alist_create(int32_t blocks, int32_t base_radix,
                    struct malloc_type *mtype, enum hammer_alloc_state state)
{
        hammer_alist_t live;
        hammer_alist_config_t bl;
        size_t metasize;

        live = kmalloc(sizeof(*live), mtype, M_WAITOK);
        live->config = bl = kmalloc(sizeof(*bl), mtype, M_WAITOK);
        hammer_alist_template(bl, blocks, base_radix, 0);

        metasize = sizeof(*live->meta) * bl->bl_rootblks;
        live->meta = kmalloc(metasize, mtype, M_WAITOK);
        bzero(live->meta, metasize);

#if defined(ALIST_DEBUG)
        kprintf(
                "ALIST representing %d blocks (%d MB of swap)"
                ", requiring %dK (%d bytes) of ram\n",
                bl->bl_blocks,
                bl->bl_blocks * 4 / 1024,
                (bl->bl_rootblks * sizeof(*live->meta) + 1023) / 1024,
                (bl->bl_rootblks * sizeof(*live->meta))
        );
        if (base_radix != 1) {
                kprintf("ALIST recurses when it reaches a base_radix of %d\n",
                        base_radix);
        }
        kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
        hammer_alist_init(live, 0, blocks, state);
        return(live);
}

void
hammer_alist_destroy(hammer_alist_t live, struct malloc_type *mtype)
{
        kfree(live->config, mtype);
        kfree(live->meta, mtype);
        live->config = NULL;
        live->meta = NULL;
        kfree(live, mtype);
}

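/*
 * Illustrative usage of the userland debugging interface above; a sketch
 * only, left disabled.  It assumes the stand-alone (!_KERNEL) build where
 * kmalloc/kfree ignore mtype.
 */
#if 0
static void
hammer_alist_demo(void)
{
        hammer_alist_t live;
        int32_t blk;

        /* 1024 blocks, no stacked layer (base_radix 1), initially free */
        live = hammer_alist_create(1024, 1, NULL, HAMMER_ASTATE_FREE);
        blk = hammer_alist_alloc(live, 16);     /* count must be a power of 2 */
        if (blk != HAMMER_ALIST_BLOCK_NONE)
                hammer_alist_free(live, blk, 16);
        hammer_alist_destroy(live, NULL);
}
#endif
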
#endif

/*
 * hammer_alist_alloc()
 *
 *      Reserve space in the block bitmap.  Return the base of a contiguous
 *      region or HAMMER_ALIST_BLOCK_NONE if space could not be allocated.
 */

int32_t
hammer_alist_alloc(hammer_alist_t live, int32_t count)
{
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;
        hammer_alist_config_t bl = live->config;

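        /*
         * The expression below is a power-of-2 check: (count | (count - 1))
         * equals (count << 1) - 1 only when count is a power of 2.  e.g.
         * count 8: (8|7) == 15 == (8<<1)-1, whereas count 6: (6|5) == 7 != 11.
         */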
        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count <= bl->bl_radix) {
                /*
                 * When skip is 1 we are at a leaf.  If we are the terminal
                 * allocator we use our native leaf functions and radix will
                 * be HAMMER_ALIST_BMAP_RADIX.  Otherwise we are a meta node
                 * which will chain to another allocator.
                 */
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_fwd(
                                    live->meta + 1, 0, count, 0);
                } else {
                        blk = hammer_alst_meta_alloc_fwd(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip, 0);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

int32_t
hammer_alist_alloc_fwd(hammer_alist_t live, int32_t count, int32_t atblk)
{
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;
        hammer_alist_config_t bl = live->config;

        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count <= bl->bl_radix) {
                /*
                 * When skip is 1 we are at a leaf.  If we are the terminal
                 * allocator we use our native leaf functions and radix will
                 * be HAMMER_ALIST_BMAP_RADIX.  Otherwise we are a meta node
                 * which will chain to another allocator.
                 */
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_fwd(
                                    live->meta + 1, 0, count, atblk);
                } else {
                        blk = hammer_alst_meta_alloc_fwd(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip, atblk);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

int32_t
hammer_alist_alloc_rev(hammer_alist_t live, int32_t count, int32_t atblk)
{
        hammer_alist_config_t bl = live->config;
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;

        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count <= bl->bl_radix) {
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_rev(
                                    live->meta + 1, 0, count, atblk);
                } else {
                        blk = hammer_alst_meta_alloc_rev(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip, atblk);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

/*
 * hammer_alist_find()
 *
 *      Locate the first block >= atblk marked as allocated in the A-list
 *      and return it.  Return HAMMER_ALIST_BLOCK_NONE if no block could
 *      be found.
 */
int32_t
hammer_alist_find(hammer_alist_t live, int32_t atblk)
{
        hammer_alist_config_t bl = live->config;
        KKASSERT(live->config != NULL);
        KKASSERT(atblk >= 0);
        atblk = hammer_alst_find(live, live->meta + 1, 0, bl->bl_radix,
                                 bl->bl_skip, atblk);
        return(atblk);
}

/*
 * hammer_alist_free()
 *
 *      Free up space in the block bitmap.  Panic if an inconsistency is
 *      found.
 *
 *      Unlike allocations, there are no alignment requirements for blkno or
 *      count when freeing blocks.
 */

void
hammer_alist_free(hammer_alist_t live, int32_t blkno, int32_t count)
{
        hammer_alist_config_t bl = live->config;

        KKASSERT(blkno + count <= bl->bl_blocks);
        if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                hammer_alst_leaf_free(live->meta + 1, blkno, count);
        } else {
                hammer_alst_meta_free(live, live->meta + 1,
                                      blkno, count,
                                      bl->bl_radix, bl->bl_skip, 0);
        }
        live->meta->bm_alist_freeblks += count;
}

/*
 * Recover an A-list.  This will dive down to the leaves and regenerate
 * the hints and the freeblks count.  This function will also recurse
 * through any stacked A-lists.  The recovered free block count is
 * returned on success, a negative error code on failure.
 *
 * Since A-lists have no pointers the only thing that can prevent recovery
 * is an I/O error in e.g. a stacked A-list.  Note, however, that a
 * successful recovery does not guarantee that the recovered map is
 * meaningful.
 *
 * blk is usually passed as 0 at the top level and is adjusted as the recovery
 * code scans the A-list.  It is only used when recursing down a stacked
 * A-list.
 */
int
hammer_alist_recover(hammer_alist_t live, int32_t blk, int32_t start,
                     int32_t count)
{
        hammer_alist_config_t bl = live->config;
        struct hammer_alist_recover info;
        int32_t r;

        info.live = live;
        info.error = 0;

        live->meta->bm_alist_freeblks = 0;
        live->meta->bm_alist_base_freeblks = count;
        hammer_alst_radix_recover(&info, live->meta + 1, blk, bl->bl_radix,
                                  bl->bl_skip, bl->bl_blocks);
        if (info.error)
                return(info.error);

        /*
         * Any garbage between 0 and start, and beyond start + count, is
         * removed.
         */
        while ((r = hammer_alist_find(live, 0)) != HAMMER_ALIST_BLOCK_NONE &&
               r < start) {
                hammer_alist_free(live, r, 1);
        }
        while ((r = hammer_alist_find(live, start + count)) !=
                HAMMER_ALIST_BLOCK_NONE) {
                hammer_alist_free(live, r, 1);
        }
        return(live->meta->bm_alist_freeblks);
}
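
/*
 * Illustrative example of the trimming above: recovering with start = 8
 * and count = 16 in a 32 block A-list regenerates the bitmap from the
 * leaves, then frees any blocks still marked allocated in [0,8) and
 * [24,32), so only allocations inside [8,24) survive the recovery.
 */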

int
hammer_alist_isfull(hammer_alist_t live)
{
        return(live->meta->bm_alist_freeblks == 0);
}

int
hammer_alist_isempty(hammer_alist_t live)
{
        return((int)live->meta->bm_alist_freeblks ==
               live->meta->bm_alist_base_freeblks);
}

#ifdef ALIST_DEBUG

/*
 * alist_print()    - dump radix tree
 */

void
hammer_alist_print(hammer_alist_t live, int tab)
{
        hammer_alist_config_t bl = live->config;

        kprintf("%*.*sALIST (%d/%d free blocks) {\n",
                tab, tab, "",
                live->meta->bm_alist_freeblks,
                live->meta->bm_alist_base_freeblks);
        hammer_alst_radix_print(live, live->meta + 1, 0,
                                bl->bl_radix, bl->bl_skip, tab + 4);
        kprintf("%*.*s}\n", tab, tab, "");
}

#endif

/************************************************************************
 *                        ALLOCATION SUPPORT FUNCTIONS                  *
 ************************************************************************
 *
 *      These support functions do all the actual work.  They may seem
 *      rather longish, but that's because I've commented them up.  The
 *      actual code is straightforward.
 *
 */

/*
 * hammer_alst_leaf_alloc_fwd()
 *
 *      Allocate at a leaf in the radix tree (a bitmap).
 *
 *      This is the core of the allocator and is optimized for the 1 block
 *      and the HAMMER_ALIST_BMAP_RADIX block allocation cases.  Other cases
 *      are somewhat slower.  The 1 block allocation case is log2 and extremely
 *      quick.
 */
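/*
 * Worked example (a sketch, assuming HAMMER_ALIST_BMAP_RADIX == 32): for
 * count = 4 the allocation loop below slides the 4-bit mask 0x0000000f
 * across the bitmap in steps of 4 and claims the first aligned position
 * whose bits are all set (all free) with blk >= atblk, by clearing them.
 */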

static int32_t
hammer_alst_leaf_alloc_fwd(hammer_almeta_t scan, int32_t blk,
                           int count, int32_t atblk)
{
        u_int32_t orig = scan->bm_bitmap;
        int32_t saveblk = blk;

        /*
         * Optimize bitmap all-allocated case.  Also, count = 1
         * case assumes at least 1 bit is free in the bitmap, so
         * we have to take care of this case here.
         */
        if (orig == 0) {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

#if 0
        /*
         * Optimized code to allocate one bit out of the bitmap
         *
         * mask iterates e.g. 00001111 00000011 00000001
         *
         * mask starts at 00001111
         */
        if (count == 1) {
                u_int32_t mask;
                int j = HAMMER_ALIST_BMAP_RADIX/2;
                int r = 0;

                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX/2);

                while (j) {
                        if ((orig & mask) == 0) {
                            r += j;
                            orig >>= j;
                        }
                        j >>= 1;
                        mask >>= j;
                }
                scan->bm_bitmap &= ~(1 << r);
                return(blk + r);
        }
#endif

        /*
         * non-optimized code to allocate N bits out of the bitmap.
         * The more bits, the faster the code runs.  It will run
         * the slowest allocating 2 bits, but since there aren't any
         * memory ops in the core loop (or shouldn't be, anyway),
         * you probably won't notice the difference.
         *
         * Similar to the blist case, the alist code also requires
         * allocations to be power-of-2 sized and aligned to the
         * size of the allocation, which simplifies the algorithm.
         */
        {
                int j;
                int n = HAMMER_ALIST_BMAP_RADIX - count;
                u_int32_t mask;

                mask = (u_int32_t)-1 >> n;

                for (j = 0; j <= n; j += count) {
                        if ((orig & mask) == mask && blk >= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask = mask << count;
                        blk += count;
                }
        }

        /*
         * We couldn't allocate count in this subtree, update bighint if
         * atblk didn't interfere with the hinting mechanism.
         */
        if (saveblk >= atblk)
                scan->bm_bighint = count - 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * This version allocates blocks in the reverse direction.
 */
static int32_t
hammer_alst_leaf_alloc_rev(hammer_almeta_t scan, int32_t blk,
                           int count, int32_t atblk)
{
        u_int32_t orig = scan->bm_bitmap;
        int32_t saveblk;

        /*
         * Optimize bitmap all-allocated case.  Also, count = 1
         * case assumes at least 1 bit is free in the bitmap, so
         * we have to take care of this case here.
         */
        if (orig == 0) {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

#if 0
        /*
         * Optimized code to allocate one bit out of the bitmap
         */
        if (count == 1) {
                u_int32_t mask;
                int j = HAMMER_ALIST_BMAP_RADIX/2;
                int r = HAMMER_ALIST_BMAP_RADIX - 1;

                mask = ~((u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX/2));

                while (j) {
                        if ((orig & mask) == 0) {
                            r -= j;
                            orig <<= j;
                        }
                        j >>= 1;
                        mask <<= j;
                }
                scan->bm_bitmap &= ~(1 << r);
                return(blk + r);
        }
#endif

        /*
         * non-optimized code to allocate N bits out of the bitmap.
         * The more bits, the faster the code runs.  It will run
         * the slowest allocating 2 bits, but since there aren't any
         * memory ops in the core loop (or shouldn't be, anyway),
         * you probably won't notice the difference.
         *
         * Similar to the blist case, the alist code also requires
         * allocations to be power-of-2 sized and aligned to the
         * size of the allocation, which simplifies the algorithm.
         *
         * initial mask if count == 2:  1100....0000
         */
        {
                int j;
                int n = HAMMER_ALIST_BMAP_RADIX - count;
                u_int32_t mask;

                mask = ((u_int32_t)-1 >> n) << n;
                blk += n;
                saveblk = blk;

                for (j = n; j >= 0; j -= count) {
                        if ((orig & mask) == mask && blk <= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask = mask >> count;
                        blk -= count;
                }
        }

        /*
         * We couldn't allocate count in this subtree, update bighint if
         * atblk didn't interfere with it.
         */
        if (saveblk <= atblk)
                scan->bm_bighint = count - 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * hammer_alst_meta_alloc_fwd()
 *
 *      Allocate at a meta node in the radix tree.
 *
 *      Attempt to allocate at a meta node.  If we can't, we update
 *      bighint and return a failure.  Updating bighint optimizes future
 *      calls that hit this node.  We have to check for our collapse cases
 *      and we have a few optimizations strewn in as well.
 */
static int32_t
hammer_alst_meta_alloc_fwd(hammer_alist_t live, hammer_almeta_t scan,
                           int32_t blk, int32_t count,
                           int32_t radix, int skip, int32_t atblk
) {
        hammer_alist_config_t bl;
        u_int32_t mask;
        u_int32_t pmask;
        int32_t saveblk;
        int next_skip;
        int i;

        /*
         * ALL-ALLOCATED special case
         */
        if (scan->bm_bitmap == 0)  {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        /*
         * Radix now represents each bitmap entry for this meta node.  If
         * the number of blocks being allocated can be fully represented,
         * we allocate directly out of this meta node.
         *
         * Meta node bitmaps use 2 bits per block.
         *
         *      00      ALL-ALLOCATED - UNINITIALIZED
         *      01      PARTIALLY-FREE/PARTIALLY-ALLOCATED
         *      10      ALL-ALLOCATED - INITIALIZED
         *      11      ALL-FREE      - UNINITIALIZED
         */
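        /*
         * Illustrative example (assuming HAMMER_ALIST_META_RADIX == 16):
         * allocating count == 2 * radix spans two bitmap entries, so
         * n == 4 bits and the loop below tests mask 0x0000000f, then
         * 0x000000f0, etc., claiming the first position whose entries
         * are both 11 (all-free) at or beyond atblk.
         */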
        if (count >= radix) {
                int n = count / radix * 2;      /* number of bits */
                int nd2 = n / 2;
                int j;

                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - n);
                saveblk = blk;
                for (j = 0; j < (int)HAMMER_ALIST_META_RADIX; j += nd2) {
                        if ((scan->bm_bitmap & mask) == mask && blk >= atblk) {
                                /*
                                 * NOTE: Marked all-allocated/uninitialized
                                 * rather than all-allocated/initialized.
                                 * See the warning at the top of the file.
                                 */
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask <<= n;
                        blk += radix * nd2;
                }
                if (scan->bm_bighint >= count && saveblk >= atblk)
                        scan->bm_bighint = count >> 1;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * If the count is too big we couldn't allocate anything from a
         * recursion even if the sub-tree were entirely free.
         */
        saveblk = blk;
        if (count > radix)
                goto failed;

        /*
         * If not we have to recurse.
         */
        mask = 0x00000003;
        pmask = 0x00000001;

        if (skip == 1) {
                /*
                 * If skip is 1 we are a meta leaf node, which means there
                 * is another allocation layer we have to dive down into.
                 */
                for (i = 0; i < (int)HAMMER_ALIST_META_RADIX; ++i) {
                        /*
                         * If the next layer is completely free then call
                         * its init function to initialize it.
                         */
                        if ((scan->bm_bitmap & mask) == mask &&
                            blk + radix > atblk) {
                                if (bl->bl_radix_init(live->info, blk, radix, HAMMER_ASTATE_FREE) == 0) {
                                        /*
                                         * NOTE: Marked all-allocated/
                                         * uninitialized rather than
                                         * all-allocated/initialized.
                                         * See the warning at the top
                                         * of the file.
                                         */
                                        scan->bm_bitmap &= ~mask;
                                        scan->bm_bitmap |= pmask;
                                }
                        }

                        /*
                         * If there may be some free blocks try to allocate
                         * out of the layer.  If the layer indicates that
                         * it is completely full then clear both bits to
                         * propagate the condition.
                         */
                        if ((scan->bm_bitmap & mask) == pmask &&
                            blk + radix > atblk) {
                                int32_t r;
                                int32_t full;

                                r = bl->bl_radix_alloc_fwd(live->info, blk,
                                                           radix, count, atblk,
                                                           &full);
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (full)
                                                scan->bm_bitmap &= ~mask;
                                        return(r);
                                }
                        }
                        blk += radix;
                        mask <<= 2;
                        pmask <<= 2;
                }
        } else {
                /*
                 * Otherwise there are sub-records in the current a-list
                 * layer.  We can also peek into the sub-layers to get
                 * more accurate size hints.
                 */
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                for (i = 1; i < skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                /*
                                 * Terminator
                                 */
                                break;
                        }

                        /*
                         * Initialize bitmap if allocating from the all-free
                         * case.
                         */
                        if ((scan->bm_bitmap & mask) == mask) {
                                scan[i].bm_bitmap = (u_int32_t)-1;
                                scan[i].bm_bighint = radix;
                        }

                        if (count <= scan[i].bm_bighint &&
                            blk + radix > atblk) {
                                /*
                                 * count fits in object, recurse into the
                                 * next layer.  If the next_skip is 1 it
                                 * will be either a normal leaf or a meta
                                 * leaf.
                                 */
                                int32_t r;

                                if (next_skip == 1 && bl->bl_terminal) {
                                        r = hammer_alst_leaf_alloc_fwd(
                                                &scan[i], blk, count, atblk);
                                } else {
                                        r = hammer_alst_meta_alloc_fwd(
                                                live, &scan[i],
                                                blk, count,
                                                radix, next_skip, atblk);
                                }
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (scan[i].bm_bitmap == 0) {
                                                scan->bm_bitmap &= ~mask;
                                        } else {
                                                scan->bm_bitmap &= ~mask;
                                                scan->bm_bitmap |= pmask;
                                        }
                                        return(r);
                                }
                        }
                        blk += radix;
                        mask <<= 2;
                        pmask <<= 2;
                }
        }

failed:
        /*
         * We couldn't allocate count in this subtree, update bighint.
         */
        if (scan->bm_bighint >= count && saveblk >= atblk)
                scan->bm_bighint = count >> 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * This version allocates blocks in the reverse direction.
 */
static int32_t
hammer_alst_meta_alloc_rev(hammer_alist_t live, hammer_almeta_t scan,
                           int32_t blk, int32_t count,
                           int32_t radix, int skip, int32_t atblk
) {
        hammer_alist_config_t bl;
        int i;
        int j;
        u_int32_t mask;
        u_int32_t pmask;
        int32_t saveblk;
        int next_skip;

        /*
         * ALL-ALLOCATED special case
         */
        if (scan->bm_bitmap == 0)  {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        /*
         * Radix now represents each bitmap entry for this meta node.  If
         * the number of blocks being allocated can be fully represented,
         * we allocate directly out of this meta node.
         *
         * Meta node bitmaps use 2 bits per block.
         *
         *      00      ALL-ALLOCATED (uninitialized)
         *      01      PARTIALLY-FREE/PARTIALLY-ALLOCATED
         *      10      ALL-ALLOCATED (initialized)
         *      11      ALL-FREE
         */
        if (count >= radix) {
                int n = count / radix * 2;      /* number of bits */
                int nd2 = n / 2;                /* number of bitmap entries */

                /*
                 * Initial mask if e.g. n == 2:  1100....0000
                 */
                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - n) <<
                        (HAMMER_ALIST_BMAP_RADIX - n);
                blk += (HAMMER_ALIST_META_RADIX - nd2) * radix;
                saveblk = blk;
                for (j = HAMMER_ALIST_META_RADIX - nd2; j >= 0; j -= nd2) {
                        if ((scan->bm_bitmap & mask) == mask && blk <= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask >>= n;
                        blk -= nd2 * radix;
                }
                if (scan->bm_bighint >= count && saveblk <= atblk)
                        scan->bm_bighint = count >> 1;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * If the count is too big we couldn't allocate anything from a
         * recursion even if the sub-tree were entirely free.
         */
        if (count > radix) {
                saveblk = atblk;        /* make it work for the conditional */
                goto failed;            /* at the failed label */
        }

        if (skip == 1) {
                /*
                 * We need to recurse but we are at a meta node leaf, which
                 * means there is another layer under us.
                 */
                mask = 0xC0000000;
                pmask = 0x40000000;
                blk += radix * HAMMER_ALIST_META_RADIX - radix;
                saveblk = blk;

                for (i = 0; i < (int)HAMMER_ALIST_META_RADIX; ++i) {
                        /*
                         * If the next layer is completely free then call
                         * its init function to initialize it.  The init
                         * function is responsible for the initial freeing.
                         */
                        if ((scan->bm_bitmap & mask) == mask && blk <= atblk) {
                                if (bl->bl_radix_init(live->info, blk, radix, HAMMER_ASTATE_FREE) == 0) {
                                        scan->bm_bitmap &= ~mask;
                                        scan->bm_bitmap |= pmask;
                                }
                        }

                        /*
                         * If there may be some free blocks try to allocate
                         * out of the layer.  If the layer indicates that
                         * it is completely full then clear both bits to
                         * propagate the condition.
                         */
                        if ((scan->bm_bitmap & mask) == pmask && blk <= atblk) {
                                int32_t r;
                                int32_t full;

                                r = bl->bl_radix_alloc_rev(live->info, blk,
                                                           radix, count,
                                                           atblk, &full);
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (full)
                                                scan->bm_bitmap &= ~mask;
                                        return(r);
                                }
                        }
                        mask >>= 2;
                        pmask >>= 2;
                        blk -= radix;
                }
        } else {
                /*
                 * Since we are going in the reverse direction we need an
                 * extra loop to determine if there is a terminator, then
                 * run backwards.
                 *
                 * This is a little weird but we do not want to overflow the
                 * mask/pmask in the loop.
                 */
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                j = 0;
                for (i = 1; i < skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                KKASSERT(j != 0);
                                break;
                        }
                        blk += radix;
                        j += 2;
                }
                blk -= radix;
                j -= 2;
                mask = 0x00000003 << j;
                pmask = 0x00000001 << j;
                i -= next_skip;
                saveblk = blk;

                while (i >= 1) {
                        /*
                         * Initialize the bitmap in the child if allocating
                         * from the all-free case.
                         */
                        if ((scan->bm_bitmap & mask) == mask) {
                                scan[i].bm_bitmap = (u_int32_t)-1;
                                scan[i].bm_bighint = radix;
                        }

                        /*
                         * Handle various count cases.  Bighint may be too
                         * large but is never too small.
                         */
                        if (count <= scan[i].bm_bighint && blk <= atblk) {
                                /*
                                 * count fits in object
                                 */
                                int32_t r;
                                if (next_skip == 1 && bl->bl_terminal) {
                                        r = hammer_alst_leaf_alloc_rev(
                                                &scan[i], blk, count, atblk);
                                } else {
                                        r = hammer_alst_meta_alloc_rev(
                                                live, &scan[i],
                                                blk, count,
                                                radix, next_skip, atblk);
                                }
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (scan[i].bm_bitmap == 0) {
                                                scan->bm_bitmap &= ~mask;
                                        } else {
                                                scan->bm_bitmap &= ~mask;
                                                scan->bm_bitmap |= pmask;
                                        }
                                        return(r);
                                }
                        }
                        blk -= radix;
                        mask >>= 2;
                        pmask >>= 2;
                        i -= next_skip;
                }
        }

failed:
        /*
         * We couldn't allocate count in this subtree, update bighint.
         * Since we are restricted to powers of 2, the next highest count
         * we might be able to allocate is (count >> 1).
         */
        if (scan->bm_bighint >= count && saveblk <= atblk)
                scan->bm_bighint = count >> 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * HAMMER_ALST_FIND()
 *
 * Locate the first allocated block greater or equal to atblk.
 */
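/*
 * Per-entry state decoding used below: 00 and 10 mean everything in the
 * entry is allocated (return the first candidate block), 11 means all
 * free (skip), and 01 means partially allocated (recurse, or call
 * bl_radix_find() when this meta chains to a stacked A-list).
 */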
static int32_t
hammer_alst_find(hammer_alist_t live, hammer_almeta_t scan, int32_t blk,
                 int32_t radix, int32_t skip, int32_t atblk)
{
        u_int32_t mask;
        u_int32_t pmask;
        int32_t next_skip;
        int32_t tmpblk;
        int i;
        int j;

        /*
         * Leaf node (currently hammer_alist_find() only works on terminal
         * a-lists and the case is asserted in hammer_alist_find()).
         */
        if (skip == 1 && live->config->bl_terminal) {
                if (scan->bm_bitmap == (u_int32_t)-1)
                        return(HAMMER_ALIST_BLOCK_NONE);
                for (i = 0; i < (int)HAMMER_ALIST_BMAP_RADIX; ++i) {
                        if (blk + i < atblk)
                                continue;
                        if ((scan->bm_bitmap & (1 << i)) == 0)
                                return(blk + i);
                }
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * Meta
         */
        radix /= HAMMER_ALIST_META_RADIX;
        next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
        mask = 0x00000003;
        pmask = 0x00000001;
        for (j = 0, i = 1; j < HAMMER_ALIST_META_RADIX; (i += next_skip), ++j) {
                /*
                 * Check Terminator
                 */
                if (scan[i].bm_bighint == (int32_t)-1) {
                        break;
                }

                /*
                 * Recurse if this meta might contain a desired block.
                 */
                if (blk + radix > atblk) {
                        if ((scan->bm_bitmap & mask) == 0) {
                                /*
                                 * 00 - all-allocated, uninitialized
                                 */
                                return(atblk < blk ? blk : atblk);
                        } else if ((scan->bm_bitmap & mask) == (pmask << 1)) {
                                /*
                                 * 10 - all-allocated, initialized
                                 */
                                return(atblk < blk ? blk : atblk);
                        } else if ((scan->bm_bitmap & mask) == mask) {
                                /*
                                 * 11 - all-free (skip)
                                 */
                        } else if (next_skip == 0) {
                                /*
                                 * Partially allocated but we have to recurse
                                 * into a stacked A-list.
                                 */
                                tmpblk = live->config->bl_radix_find(
                                                live->info, blk, radix, atblk);
                                if (tmpblk != HAMMER_ALIST_BLOCK_NONE)
                                        return(tmpblk);
                        } else if ((scan->bm_bitmap & mask) == pmask) {
                                /*
                                 * 01 - partially-allocated
                                 */
                                tmpblk = hammer_alst_find(live, &scan[i],
                                                          blk, radix,
                                                          next_skip, atblk);
                                if (tmpblk != HAMMER_ALIST_BLOCK_NONE)
                                        return(tmpblk);

                        }
                }
                mask <<= 2;
                pmask <<= 2;
                blk += radix;
        }
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * HAMMER_ALST_LEAF_FREE()
 *
 *      Free allocated blocks from leaf bitmap.  The allocation code is
 *      restricted to powers of 2, the freeing code is not.
 */
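/*
 * Worked example: freeing count = 3 at blk = 5 within a leaf yields
 * n = 5 and mask = ((u_int32_t)-1 << 5) & ((u_int32_t)-1 >> 24), i.e.
 * bits 5-7.  The code asserts those bits are currently 0 (allocated)
 * and then sets them to mark the blocks free.
 */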
static void
hammer_alst_leaf_free(hammer_almeta_t scan, int32_t blk, int count) {
        /*
         * free some data in this bitmap
         *
         * e.g.
         *      0000111111111110000
         *          \_________/\__/
         *              v        n
         */
        int n = blk & (HAMMER_ALIST_BMAP_RADIX - 1);
        u_int32_t mask;

        mask = ((u_int32_t)-1 << n) &
            ((u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - count - n));

        if (scan->bm_bitmap & mask)
                panic("hammer_alst_leaf_free: freeing free block");
        scan->bm_bitmap |= mask;

        /*
         * We could probably do a better job here.  We are required to make
         * bighint at least as large as the biggest contiguous block of
         * data.  If we just shoehorn it, a little extra overhead will
         * be incurred on the next allocation (but only that one typically).
         */
        scan->bm_bighint = HAMMER_ALIST_BMAP_RADIX;
}

/*
 * HAMMER_ALST_META_FREE()
 *
 *      Free allocated blocks from radix tree meta info.
 *
 *      This support routine frees a range of blocks from the bitmap.
 *      The range must be entirely enclosed by this radix node.  If a
 *      meta node, we break the range down recursively to free blocks
 *      in subnodes (which means that this code can free an arbitrary
 *      range whereas the allocation code cannot allocate an arbitrary
 *      range).
 */

1264 static void 
1265 hammer_alst_meta_free(hammer_alist_t live, hammer_almeta_t scan, 
1266                       int32_t freeBlk, int32_t count,
1267                       int32_t radix, int skip, int32_t blk
1268 ) {
1269         hammer_alist_config_t bl;
1270         int next_skip;
1271         u_int32_t mask;
1272         u_int32_t pmask;
1273         int i;
1274
1275         /*
1276          * Break the free down into its components.  Because it is so easy
1277          * to implement, frees are not limited to power-of-2 sizes.
1278          *
1279          * Each block in a meta-node bitmap takes two bits.
1280          */
1281         radix /= HAMMER_ALIST_META_RADIX;
1282         bl = live->config;
1283
1284         i = (freeBlk - blk) / radix;
1285         blk += i * radix;
1286         mask = 0x00000003 << (i * 2);
1287         pmask = 0x00000001 << (i * 2);
1288
1289         if (skip == 1) {
1290                 /*
1291                  * Our meta node is a leaf node, which means it must recurse
1292                  * into another allocator.
1293                  */
                while (i < (int)HAMMER_ALIST_META_RADIX &&
                       blk < freeBlk + count) {
                        int32_t v;

                        v = blk + radix - freeBlk;
                        if (v > count)
                                v = count;

                        if (scan->bm_bighint == (int32_t)-1)
                                panic("hammer_alst_meta_free: freeing unexpected range");
                        KKASSERT((scan->bm_bitmap & mask) != mask);

                        if (freeBlk == blk && count >= radix) {
                                /*
                                 * Freeing an entire zone.  Only recurse if
                                 * the zone was initialized.  A 00 state means
                                 * that the zone is marked all-allocated,
                                 * but was never initialized.
                                 *
                                 * Then set the zone to the all-free state (11).
                                 */
                                int32_t empty;

                                if (scan->bm_bitmap & mask) {
                                        bl->bl_radix_free(live->info, blk, radix,
                                                          freeBlk - blk, v, &empty);
                                        KKASSERT(empty);
                                        bl->bl_radix_destroy(live->info, blk, radix);
                                }
                                scan->bm_bitmap |= mask;
                                scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                /* XXX bighint not being set properly */
                        } else {
                                /*
                                 * Recursion case, partial free.  If 00 the
                                 * zone is marked all-allocated but has never
                                 * been initialized, so we init it.
                                 */
                                int32_t empty;

                                if ((scan->bm_bitmap & mask) == 0)
                                        bl->bl_radix_init(live->info, blk, radix, HAMMER_ASTATE_ALLOC);
                                bl->bl_radix_free(live->info, blk, radix,
                                                  freeBlk - blk, v, &empty);
                                if (empty) {
                                        scan->bm_bitmap |= mask;
                                        scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                        bl->bl_radix_destroy(live->info, blk, radix);
                                        /* XXX bighint not being set properly */
                                } else {
                                        scan->bm_bitmap |= pmask;
                                        if (scan->bm_bighint < radix / 2)
                                                scan->bm_bighint = radix / 2;
                                        /* XXX bighint not being set properly */
                                }
                        }
                        ++i;
                        mask <<= 2;
                        pmask <<= 2;
                        count -= v;
                        freeBlk += v;
                        blk += radix;
                }
        } else {
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                i = 1 + i * next_skip;

                while (i <= skip && blk < freeBlk + count) {
                        int32_t v;

                        KKASSERT(mask != 0);

                        v = blk + radix - freeBlk;
                        if (v > count)
                                v = count;

                        if (scan->bm_bighint == (int32_t)-1)
                                panic("hammer_alst_meta_free: freeing unexpected range");

                        if (freeBlk == blk && count >= radix) {
                                /*
                                 * All-free case, no need to update sub-tree
                                 */
                                scan->bm_bitmap |= mask;
                                scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                /* XXX bighint not being set properly */
                        } else {
                                /*
                                 * Recursion case
                                 */
                                if (next_skip == 1 && bl->bl_terminal) {
                                        hammer_alst_leaf_free(&scan[i], freeBlk, v);
                                } else {
                                        hammer_alst_meta_free(live, &scan[i],
                                                              freeBlk, v,
                                                              radix, next_skip,
                                                              blk);
                                }
                                if (scan[i].bm_bitmap == (u_int32_t)-1)
                                        scan->bm_bitmap |= mask;
                                else
                                        scan->bm_bitmap |= pmask;
                                if (scan->bm_bighint < scan[i].bm_bighint)
                                        scan->bm_bighint = scan[i].bm_bighint;
                        }
                        mask <<= 2;
                        pmask <<= 2;
                        count -= v;
                        freeBlk += v;
                        blk += radix;
                        i += next_skip;
                }
        }
}

/*
 * hammer_alst_radix_init()
 *
 *      Initialize our meta structures and bitmaps and calculate the exact
 *      number of meta-nodes required to manage 'count' blocks.
 *
 *      The required space may be truncated due to early termination records.
 */
static int32_t
hammer_alst_radix_init(hammer_almeta_t scan, int32_t radix,
                       int skip, int32_t count)
{
        int i;
        int next_skip;
        int32_t memindex = 1;
        u_int32_t mask;
        u_int32_t pmask;

        /*
         * Basic initialization of the almeta for meta or leaf nodes.  This
         * marks the element as all-allocated.
         */
        if (scan) {
                scan->bm_bighint = 0;
                scan->bm_bitmap = 0;
        }

        /*
         * We are at a terminal node; we only eat one meta element.  If
         * live->config->bl_terminal is set this is a leaf node, otherwise
         * it is a meta node for a stacked A-list.  We do NOT recurse into
         * stacked A-lists but simply leave the entire stack marked
         * all-allocated, using code 00 (all-allocated & uninitialized).
         */
        if (skip == 1)
                return(memindex);

        /*
         * Meta node.  If allocating the entire object we can special
         * case it.  However, we need to figure out how much memory
         * is required to manage 'count' blocks, so we continue on anyway.
         */
        radix /= HAMMER_ALIST_META_RADIX;
        next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
        mask = 0x00000003;
        pmask = 0x00000001;

        for (i = 1; i < skip; i += next_skip) {
                /*
                 * We eat up to this record
                 */
                memindex = i;

                KKASSERT(mask != 0);

                if (count >= radix) {
                        /*
                         * Allocate the entire object
                         */
                        memindex += hammer_alst_radix_init(
                            ((scan) ? &scan[i] : NULL),
                            radix,
                            next_skip,
                            radix
                        );
                        count -= radix;
                        /* already marked as wholly allocated */
                } else if (count > 0) {
                        /*
                         * Allocate a partial object
                         */
                        memindex += hammer_alst_radix_init(
                            ((scan) ? &scan[i] : NULL),
                            radix,
                            next_skip,
                            count
                        );
                        count = 0;

                        /*
                         * Mark as partially allocated
                         */
                        if (scan)
                                scan->bm_bitmap |= pmask;
                } else {
                        /*
                         * Add terminator and break out.  The terminal
                         * eats the meta node at scan[i].
                         */
                        ++memindex;
                        if (scan)
                                scan[i].bm_bighint = (int32_t)-1;
                        /* already marked as wholly allocated */
                        break;
                }
                mask <<= 2;
                pmask <<= 2;
        }
        return(memindex);
}
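
/*
 * Note on the skip arithmetic (illustrative; assumes
 * HAMMER_ALIST_META_RADIX is 16): a one-level tree has skip 1, and each
 * level above it satisfies skip = META_RADIX * next_skip + 1, e.g. 17
 * for two levels and 273 for three.  The division
 * next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX inverts this,
 * recovering 17 from 273 and 1 from 17, which is why the loop above
 * steps i by next_skip to land on each child's meta node within the
 * linear array.
 */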

/*
 * hammer_alst_radix_recover()
 *
 *      This code is basically a duplicate of hammer_alst_radix_init()
 *      except that it recovers the A-list instead of initializing it.
 */
static void
hammer_alst_radix_recover(hammer_alist_recover_t info, hammer_almeta_t scan,
                          int32_t blk, int32_t radix, int skip, int32_t count)
{
        hammer_alist_t live = info->live;
        u_int32_t mask;
        u_int32_t pmask;
        int next_skip;
        int i;
        int j;
        int n;

        /*
         * Don't try to recover bighint; just set it to its maximum
         * value and let subsequent A-list allocations reoptimize it.  XXX
         */
        scan->bm_bighint = radix;

        /*
         * If we are at a terminal node (i.e. not stacked on top of another
         * A-list), just count the free blocks.
         */
        if (skip == 1 && live->config->bl_terminal) {
                for (i = 0; i < (int)HAMMER_ALIST_BMAP_RADIX; ++i) {
                        if (scan->bm_bitmap & (1 << i))
                                ++live->meta->bm_alist_freeblks;
                }
                return;
        }

        /*
         * Recursive meta node (next_skip != 0) or terminal meta
         * node (next_skip == 0).
         */
        radix /= HAMMER_ALIST_META_RADIX;
        next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
        mask = 0x00000003;
        pmask = 0x00000001;

        for (i = 1, j = 0; j < (int)HAMMER_ALIST_META_RADIX;
             ++j, i += next_skip) {
                /*
                 * Check mask:
                 *
                 *      00      ALL-ALLOCATED - UNINITIALIZED
                 *      01      PARTIALLY-FREE/PARTIALLY-ALLOCATED
                 *      10      ALL-ALLOCATED - INITIALIZED
                 *      11      ALL-FREE      - UNINITIALIZED
                 */
                KKASSERT(mask);
                if (count >= radix) {
                        /*
                         * Recover the entire object
                         */
                        if ((scan->bm_bitmap & mask) == 0) {
                                /*
                                 * All-allocated (uninited), do nothing
                                 */
                        } else if ((scan->bm_bitmap & mask) == mask) {
                                /*
                                 * All-free (uninited), no recursion is
                                 * needed; just count the free blocks.
                                 */
                                live->meta->bm_alist_freeblks += radix;
                        } else if (next_skip) {
                                /*
                                 * Normal meta node, initialized.  Recover and
                                 * adjust to either an all-allocated (inited)
                                 * or partially-allocated state.
                                 */
                                hammer_alst_radix_recover(
                                    info,
                                    &scan[i],
                                    blk,
                                    radix,
                                    next_skip,
                                    radix
                                );
                                if (scan[i].bm_bitmap == 0) {
                                        scan->bm_bitmap =
                                            (scan->bm_bitmap & ~mask) |
                                            (pmask << 1);
                                } else if (scan[i].bm_bitmap == (u_int32_t)-1) {
                                        scan->bm_bitmap |= mask;
                                } else {
                                        scan->bm_bitmap =
                                            (scan->bm_bitmap & ~mask) | pmask;
                                }
                        } else {
                                /*
                                 * Stacked meta node, recurse.
                                 */
                                n = live->config->bl_radix_recover(
                                            live->info,
                                            blk, radix, radix);
                                if (n >= 0) {
                                        live->meta->bm_alist_freeblks += n;
                                        if (n == 0) {
                                                scan->bm_bitmap =
                                                    (scan->bm_bitmap & ~mask) |
                                                    (pmask << 1);
                                        } else if (n == radix) {
                                                scan->bm_bitmap |= mask;
                                        } else {
                                                scan->bm_bitmap =
                                                    (scan->bm_bitmap & ~mask) |
                                                    pmask;
                                        }
                                } else {
                                        info->error = n;
                                }
                        }
                        count -= radix;
                } else if (count > 0) {
                        /*
                         * Recover a partial object.  The object can become
                         * wholly allocated but never wholly free.
                         */
                        if (next_skip) {
                                hammer_alst_radix_recover(
                                    info,
                                    &scan[i],
                                    blk,
                                    radix,
                                    next_skip,
                                    count
                                );
                                if (scan[i].bm_bitmap == 0) {
                                        scan->bm_bitmap =
                                            (scan->bm_bitmap & ~mask) |
                                            (pmask << 1);
                                } else {
                                        scan->bm_bitmap =
                                            (scan->bm_bitmap & ~mask) | pmask;
                                }
                        } else {
                                n = live->config->bl_radix_recover(
                                            live->info,
                                            blk, radix, count);
                                if (n >= 0) {
                                        live->meta->bm_alist_freeblks += n;
                                        if (n == 0) {
                                                scan->bm_bitmap =
                                                    (scan->bm_bitmap & ~mask) |
                                                    (pmask << 1);
                                        } else {
                                                scan->bm_bitmap =
                                                    (scan->bm_bitmap & ~mask) |
                                                    pmask;
                                        }
                                } else {
                                        info->error = n;
                                }
                        }
                        count = 0;
                } else if (next_skip) {
                        /*
                         * Add terminator.  The terminator eats the meta
                         * node at scan[i].  There is only ONE terminator;
                         * make sure we don't write out any more (set count
                         * to -1) or we may overflow our allocation.
                         */
                        if (count == 0) {
                                scan[i].bm_bighint = (int32_t)-1;
                                count = -1;
                        }
                        scan->bm_bitmap &= ~mask;       /* all-allocated/uni */
                } else {
                        scan->bm_bitmap &= ~mask;       /* all-allocated/uni */
                }
                mask <<= 2;
                pmask <<= 2;
                blk += radix;
        }
}

#ifdef ALIST_DEBUG

static void
hammer_alst_radix_print(hammer_alist_t live, hammer_almeta_t scan,
                        int32_t blk, int32_t radix, int skip, int tab)
{
        int i;
        int next_skip;
        u_int32_t mask;

        if (skip == 1 && live->config->bl_terminal) {
                kprintf(
                    "%*.*s(%04x,%d): bitmap %08x big=%d\n",
                    tab, tab, "",
                    blk, radix,
                    scan->bm_bitmap,
                    scan->bm_bighint
                );
                return;
        }

        if (scan->bm_bitmap == 0) {
                kprintf(
                    "%*.*s(%04x,%d) ALL ALLOCATED\n",
                    tab, tab, "",
                    blk,
                    radix
                );
                return;
        }
        if (scan->bm_bitmap == (u_int32_t)-1) {
                kprintf(
                    "%*.*s(%04x,%d) ALL FREE\n",
                    tab, tab, "",
                    blk,
                    radix
                );
                return;
        }

        kprintf(
            "%*.*s(%04x,%d): %s (%d) bitmap=%08x big=%d {\n",
            tab, tab, "",
            blk, radix,
            (skip == 1 ? "LAYER" : "subtree"),
            radix,
            scan->bm_bitmap,
            scan->bm_bighint
        );

        radix /= HAMMER_ALIST_META_RADIX;
        tab += 4;
        mask = 0x00000003;

        if (skip == 1) {
                for (i = 0; i < HAMMER_ALIST_META_RADIX; ++i) {
                        if ((scan->bm_bitmap & mask) == mask) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL FREE\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else if ((scan->bm_bitmap & mask) == 0) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL ALLOCATED\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else {
                                live->config->bl_radix_print(
                                                live->info, blk, radix, tab);
                        }
                        blk += radix;
                        mask <<= 2;
                }
        } else {
                next_skip = ((u_int)(skip - 1) / HAMMER_ALIST_META_RADIX);

                for (i = 1; i < skip; i += next_skip) {
                        KKASSERT(mask != 0);
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                kprintf(
                                    "%*.*s(%04x,%d): Terminator\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                                break;
                        }
                        if ((scan->bm_bitmap & mask) == mask) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL FREE\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else if ((scan->bm_bitmap & mask) == 0) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL ALLOCATED\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else {
                                hammer_alst_radix_print(
                                    live,
                                    &scan[i],
                                    blk,
                                    radix,
                                    next_skip,
                                    tab
                                );
                        }
                        blk += radix;
                        mask <<= 2;
                }
        }
        tab -= 4;

        kprintf(
            "%*.*s}\n",
            tab, tab, ""
        );
}

#endif

#ifdef ALIST_DEBUG

static struct hammer_alist_live **layers;       /* initialized by main */
static int32_t layer_radix = -1;

/*
 * Initialize a zone.  The requested initial fill for the new layer is
 * passed in via 'state'.
 *
 * If this init is being called when transitioning out of an all-free
 * state, allocate the zone and mark the whole mess as being free so
 * the caller can then allocate out of this zone.
 *
 * If this init is being called when transitioning out of an initial
 * all-allocated (00) state, allocate the zone but leave the whole mess
 * all-allocated.  The caller will then free the appropriate range.
 */
static
int
debug_radix_init(void *info, int32_t blk, int32_t radix,
                 enum hammer_alloc_state state)
{
        int layer_no = blk / layer_radix;

        printf("lower layer: init (%04x,%d) layer_radix=%d\n",
               blk, radix, layer_radix);
        KKASSERT(layer_radix == radix);
        KKASSERT(layers[layer_no] == NULL);
        layers[layer_no] = hammer_alist_create(radix, 1, NULL, state);
        return(0);
}
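
/*
 * Note: the second argument to hammer_alist_create() above is 1, which
 * (as with layer_radix 1 in main() below) means the newly created lower
 * layer has no second storage layer of its own, so the stacking
 * terminates here.
 */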

static
int32_t
debug_radix_recover(void *info, int32_t blk, int32_t radix, int32_t count)
{
        hammer_alist_t layer;
        int layer_no = blk / layer_radix;
        int32_t n;

        KKASSERT(layer_radix == radix);
        KKASSERT(layers[layer_no] != NULL);
        layer = layers[layer_no];
        n = hammer_alist_recover(layer, blk, 0, count);
        printf("Recover layer %d blk %d result %d/%d\n",
                layer_no, blk, n, count);
        return(n);
}

static
int32_t
debug_radix_find(void *info, int32_t blk, int32_t radix, int32_t atblk)
{
        hammer_alist_t layer;
        int layer_no = blk / layer_radix;
        int32_t res;

        KKASSERT(layer_radix == radix);
        KKASSERT(layers[layer_no] != NULL);
        layer = layers[layer_no];
        res = hammer_alist_find(layer, atblk - blk);
        if (res != HAMMER_ALIST_BLOCK_NONE)
                res += blk;
        return(res);
}

/*
 * This is called when a zone becomes entirely free, typically after a
 * call to debug_radix_free() has indicated that the entire zone is now
 * free.
 */
static
int
debug_radix_destroy(void *info, int32_t blk, int32_t radix)
{
        hammer_alist_t layer;
        int layer_no = blk / layer_radix;

        printf("lower layer: destroy (%04x,%d)\n", blk, radix);
        layer = layers[layer_no];
        KKASSERT(layer != NULL);
        hammer_alist_destroy(layer, NULL);
        layers[layer_no] = NULL;
        return(0);
}

static
int32_t
debug_radix_alloc_fwd(void *info, int32_t blk, int32_t radix,
                      int32_t count, int32_t atblk, int32_t *fullp)
{
        hammer_alist_t layer = layers[blk / layer_radix];
        int32_t r;

        r = hammer_alist_alloc_fwd(layer, count, atblk - blk);
        *fullp = hammer_alist_isfull(layer);
        if (r != HAMMER_ALIST_BLOCK_NONE)
                r += blk;
        return(r);
}

static
int32_t
debug_radix_alloc_rev(void *info, int32_t blk, int32_t radix,
                      int32_t count, int32_t atblk, int32_t *fullp)
{
        hammer_alist_t layer = layers[blk / layer_radix];
        int32_t r;

        r = hammer_alist_alloc_rev(layer, count, atblk - blk);
        *fullp = hammer_alist_isfull(layer);
        if (r != HAMMER_ALIST_BLOCK_NONE)
                r += blk;
        return(r);
}

static
void
debug_radix_free(void *info, int32_t blk, int32_t radix,
                 int32_t base_blk, int32_t count, int32_t *emptyp)
{
        int layer_no = blk / layer_radix;
        hammer_alist_t layer = layers[layer_no];

        KKASSERT(layer);
        hammer_alist_free(layer, base_blk, count);
        *emptyp = hammer_alist_isempty(layer);
}

static
void
debug_radix_print(void *info, int32_t blk, int32_t radix, int tab)
{
        hammer_alist_t layer = layers[blk / layer_radix];

        hammer_alist_print(layer, tab);
}
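
/*
 * Interactive test harness.  The first positional argument is the total
 * block count, the second the second-layer radix; both must be powers
 * of 2 (a second-layer radix of 1 means no second storage layer).
 */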

int
main(int ac, char **av)
{
        int32_t size = -1;
        int i;
        hammer_alist_t live;

        for (i = 1; i < ac; ++i) {
                const char *ptr = av[i];
                if (*ptr != '-') {
                        if (size == -1)
                                size = strtol(ptr, NULL, 0);
                        else if (layer_radix == -1)
                                layer_radix = strtol(ptr, NULL, 0);
                        else
                                ;       /* extra positional args ignored */
                        continue;
                }
                fprintf(stderr, "Bad option: %s\n", ptr);
                exit(1);
        }
        if (size == -1)
                size = 1024;
        if (layer_radix == -1)
                layer_radix = 1;        /* no second storage layer */
        if ((size ^ (size - 1)) != (size << 1) - 1) {
                fprintf(stderr, "size must be a power of 2\n");
                exit(1);
        }
        if ((layer_radix ^ (layer_radix - 1)) != (layer_radix << 1) - 1) {
                fprintf(stderr, "the second layer radix must be a power of 2\n");
                exit(1);
        }

        live = hammer_alist_create(size, layer_radix, NULL,
                                   HAMMER_ASTATE_ALLOC);
        layers = calloc(size, sizeof(hammer_alist_t));

        printf("A-LIST TEST %d blocks, first-layer radix %d, "
               "second-layer radix %d\n",
                size, live->config->bl_radix / layer_radix, layer_radix);

        live->config->bl_radix_init = debug_radix_init;
        live->config->bl_radix_recover = debug_radix_recover;
        live->config->bl_radix_find = debug_radix_find;
        live->config->bl_radix_destroy = debug_radix_destroy;
        live->config->bl_radix_alloc_fwd = debug_radix_alloc_fwd;
        live->config->bl_radix_alloc_rev = debug_radix_alloc_rev;
        live->config->bl_radix_free = debug_radix_free;
        live->config->bl_radix_print = debug_radix_print;

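        /*
         * hammer_alist_create() above left every block all-allocated
         * (HAMMER_ASTATE_ALLOC); free the entire range so the
         * interactive loop below starts with everything available.
         */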
        hammer_alist_free(live, 0, size);

        for (;;) {
                char buf[1024];
                int32_t da = 0;
                int32_t count = 0;
                int32_t atblk;
                int32_t blk;

                kprintf("%d/%d> ",
                        live->meta->bm_alist_freeblks, size);
                fflush(stdout);
                if (fgets(buf, sizeof(buf), stdin) == NULL)
                        break;
                switch(buf[0]) {
                case 'p':
                        hammer_alist_print(live, 0);
                        atblk = 0;
                        kprintf("allocated: ");
                        while ((atblk = hammer_alist_find(live, atblk)) != HAMMER_ALIST_BLOCK_NONE) {
                                kprintf(" %d", atblk);
                                ++atblk;
                        }
                        kprintf("\n");
                        break;
                case 'a':
                        atblk = 0;
                        if (sscanf(buf + 1, "%d %d", &count, &atblk) >= 1) {
                                blk = hammer_alist_alloc_fwd(live, count, atblk);
                                kprintf("    R=%04x\n", blk);
                        } else {
                                kprintf("?\n");
                        }
                        break;
                case 'r':
                        atblk = HAMMER_ALIST_BLOCK_MAX;
                        if (sscanf(buf + 1, "%d %d", &count, &atblk) >= 1) {
                                blk = hammer_alist_alloc_rev(live, count, atblk);
                                kprintf("    R=%04x\n", blk);
                        } else {
                                kprintf("?\n");
                        }
                        break;
                case 'f':
                        if (sscanf(buf + 1, "%x %d", &da, &count) == 2) {
                                hammer_alist_free(live, da, count);
                                if (hammer_alist_isempty(live))
                                        kprintf("a-list is now 100%% empty\n");
                        } else {
                                kprintf("?\n");
                        }
                        break;
                case 'R':
                        {
                                int n;

                                n = hammer_alist_recover(live, 0, 0,
                                           live->meta->bm_alist_base_freeblks);
                                if (n < 0)
                                        kprintf("recover: error %d\n", -n);
                                else
                                        kprintf("recover: %d free\n", n);
                        }
                        break;
                case '?':
                case 'h':
                        puts(
                            "p          -print\n"
                            "a %d       -allocate\n"
                            "r %d       -allocate reverse\n"
                            "f %x %d    -free\n"
                            "R          -recover a-list\n"
                            "h/?        -help"
                        );
                        break;
                default:
                        kprintf("?\n");
                        break;
                }
        }
        return(0);
}
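
/*
 * Illustrative session (default 1024-block configuration; exact output
 * depends on the layering and is not guaranteed):
 *
 *      1024/1024> a 256
 *          R=0000
 *      768/1024> f 0 256
 *      a-list is now 100% empty
 *      1024/1024>
 */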

void
panic(const char *ctl, ...)
{
        __va_list va;

        __va_start(va, ctl);
        vfprintf(stderr, ctl, va);
        fprintf(stderr, "\n");
        __va_end(va);
        exit(1);
}

#endif