/*
 * HAMMER_ALIST.C -
 *
 * Bitmap allocator/deallocator, using a radix tree with hinting.
 * Unlimited-size allocations, power-of-2 only, power-of-2 aligned results
 * only.  This module has been separated from the generic kernel module and
 * written specifically for embedding in HAMMER storage structures.
 * 
 * Copyright (c) 2007 The DragonFly Project.  All rights reserved.
 * 
 * This code is derived from software contributed to The DragonFly Project
 * by Matthew Dillon <dillon@backplane.com>
 * 
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name of The DragonFly Project nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific, prior written permission.
 * 
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $DragonFly: src/sys/vfs/hammer/Attic/hammer_alist.c,v 1.3 2007/10/16 18:16:42 dillon Exp $
 */
/*
 * This module implements a generic allocator through the use of a hinted
 * radix tree.  All allocations must be in powers of 2 and will return
 * similarly aligned results.  The radix tree typically recurses within
 * a memory buffer and then continues its recursion by chaining to other
 * memory buffers, creating a seamless whole capable of managing any amount
 * of storage.
 *
 * The radix tree is laid out recursively using a linear array.  Each meta
 * node is immediately followed (laid out sequentially in memory) by
 * HAMMER_ALIST_META_RADIX lower level nodes.  This is a recursive structure
 * but one that can be easily scanned through a very simple 'skip'
 * calculation.
 *
 * The radix tree supports an early-termination optimization which
 * effectively allows us to efficiently mix large and small allocations
 * with a single abstraction.  The address space can be partitioned
 * arbitrarily without adding much in the way of additional meta-storage
 * for the allocator.
 *
 * The radix tree supports allocator layering. By supplying a base_radix > 1
 * the allocator will issue callbacks to recurse into lower layers once 
 * higher layers have been exhausted.  Allocations in multiples of base_radix
 * will be entirely retained in the higher level allocator and never recurse.
 *
 * This code can be compiled stand-alone for debugging.
 */
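
/*
 * Worked example of the topology calculation (a sketch; it assumes the
 * usual HAMMER_ALIST_BMAP_RADIX of 32 and HAMMER_ALIST_META_RADIX of 16
 * from hammer_alist.h):
 *
 *	blocks = 64, base_radix = 1:
 *
 *	radix:	32 -> 512	(first capacity >= 64 blocks)
 *	skip:	 1 ->  17	(root meta node + 16 leaf positions)
 *
 *	The root meta node is followed in the linear array by its 16
 *	children at a stride of next_skip = (17 - 1) / 16 = 1.  Only the
 *	first two leaves are needed to cover 64 blocks; the third record
 *	becomes an early-termination record and the remaining positions
 *	are never stored.
 */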

#ifdef _KERNEL

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/lock.h>
#include <sys/kernel.h>
#include <sys/malloc.h>
#include <vm/vm.h>
#include <vm/vm_object.h>
#include <vm/vm_kern.h>
#include <vm/vm_extern.h>
#include <vm/vm_page.h>

#include "hammer_disk.h"

#else

#ifndef ALIST_NO_DEBUG
#define ALIST_DEBUG
#endif

#include <sys/types.h>
#include <stdio.h>
#include <assert.h>
#include <string.h>
#include <stdlib.h>
#include <stdarg.h>

#define kmalloc(a,b,c)  malloc(a)
#define kfree(a,b)      free(a)
#define kprintf         printf
#define KKASSERT(exp)   assert(exp)
struct malloc_type;

#include "hammer_alist.h"

void panic(const char *ctl, ...);

#endif

/*
 * static support functions
 */

static int32_t hammer_alst_leaf_alloc_fwd(hammer_almeta_t scan,
                                        int32_t blk, int count, int32_t atblk);
static int32_t hammer_alst_meta_alloc_fwd(hammer_alist_t live,
                                        hammer_almeta_t scan,
                                        int32_t blk, int32_t count,
                                        int32_t radix, int skip, int32_t atblk);
static int32_t hammer_alst_leaf_alloc_rev(hammer_almeta_t scan,
                                        int32_t blk, int count, int32_t atblk);
static int32_t hammer_alst_meta_alloc_rev(hammer_alist_t live,
                                        hammer_almeta_t scan,
                                        int32_t blk, int32_t count,
                                        int32_t radix, int skip, int32_t atblk);
static void hammer_alst_leaf_free(hammer_almeta_t scan,
                                        int32_t relblk, int count);
static void hammer_alst_meta_free(hammer_alist_t live, hammer_almeta_t scan,
                                        int32_t freeBlk, int32_t count, 
                                        int32_t radix, int skip, int32_t blk);
static int32_t  hammer_alst_radix_init(hammer_almeta_t scan,
                                        int32_t radix, int skip, int32_t count);
#ifdef ALIST_DEBUG
static void     hammer_alst_radix_print(hammer_alist_t live,
                                        hammer_almeta_t scan, int32_t blk,
                                        int32_t radix, int skip, int tab);
#endif

/*
 * Initialize an a-list config structure for use.  The config structure
 * describes the basic structure of an a-list's topology and may be
 * shared by any number of a-lists which have the same topology.
 *
 * blocks is the total number of blocks, that is the number of blocks
 * handled at this layer multiplied by the base radix.
 *
 * When base_radix != 1 the A-list has only meta elements and does not have
 * any leaves, in order to be able to track partial allocations.
 */
void
hammer_alist_template(hammer_alist_config_t bl, int32_t blocks,
                      int32_t base_radix, int32_t maxmeta)
{
        int radix;
        int skip;

        /*
         * Calculate radix and skip field used for scanning.  The leaf nodes
         * in our tree are either BMAP or META nodes depending on whether
         * we chain to a lower level allocation layer or not.
         */
        if (base_radix == 1)
                radix = HAMMER_ALIST_BMAP_RADIX;
        else
                radix = HAMMER_ALIST_META_RADIX;
        skip = 1;

        while (radix < blocks / base_radix) {
                radix *= HAMMER_ALIST_META_RADIX;
                skip = skip * HAMMER_ALIST_META_RADIX + 1;
        }

        /*
         * Increase the radix based on the number of blocks a lower level
         * allocator is able to handle at the 'base' of our allocator.
         * If base_radix != 1 the caller will have to initialize the callback
         * fields to implement the lower level allocator.
         */
        radix *= base_radix;

        bzero(bl, sizeof(*bl));

        bl->bl_blocks = blocks;
        bl->bl_base_radix = base_radix;
        bl->bl_radix = radix;
        bl->bl_skip = skip;
        bl->bl_rootblks = hammer_alst_radix_init(NULL, bl->bl_radix,
                                                 bl->bl_skip, blocks);
        ++bl->bl_rootblks;      /* one more for freeblks header */
        if (base_radix == 1)
                bl->bl_terminal = 1;
        KKASSERT(maxmeta == 0 || bl->bl_rootblks <= maxmeta);

#if defined(ALIST_DEBUG)
        kprintf(
                "PRIMARY ALIST LAYER manages %d blocks"
                ", requiring %dK (%d bytes) of ram\n",
                bl->bl_blocks / bl->bl_base_radix,
                (bl->bl_rootblks * sizeof(struct hammer_almeta) + 1023) / 1024,
                (bl->bl_rootblks * sizeof(struct hammer_almeta))
        );
        kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
}
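
/*
 * Usage sketch (userland debug build; hammer_alist_create() below wraps
 * this template + init sequence):
 *
 *	hammer_alist_t live = hammer_alist_create(64, 1, NULL);
 *	int32_t blk = hammer_alist_alloc(live, 16);
 *
 *	if (blk != HAMMER_ALIST_BLOCK_NONE)
 *		hammer_alist_free(live, blk, 16);
 *	hammer_alist_destroy(live, NULL);
 */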

void
hammer_alist_init(hammer_alist_t live)
{
        hammer_alist_config_t bl = live->config;

        live->meta->bm_bighint = 0;
        live->meta->bm_alist_freeblks = 0;
        hammer_alst_radix_init(live->meta + 1, bl->bl_radix,
                               bl->bl_skip, bl->bl_blocks);
}

#if !defined(_KERNEL) && defined(ALIST_DEBUG)

/*
 * hammer_alist_create()        (userland only)
 *
 *      create an alist capable of handling up to the specified number of
 *      blocks.  blocks must be greater than 0
 *
 *      The smallest alist consists of a single leaf node capable of 
 *      managing HAMMER_ALIST_BMAP_RADIX blocks, or (if base_radix != 1)
 *      a single meta node capable of managing HAMMER_ALIST_META_RADIX
 *      blocks which recurses into other storage layers for a total
 *      handling capability of (HAMMER_ALIST_META_RADIX * base_radix) blocks.
 *
 *      Larger a-lists increase their capability exponentially by
 *      HAMMER_ALIST_META_RADIX.
 *
 *      The block count is the total number of blocks inclusive of any
 *      layering.  blocks can be less than base_radix and will result in
 *      a radix tree with a single leaf node which then chains down.
 */

hammer_alist_t 
hammer_alist_create(int32_t blocks, int32_t base_radix,
                    struct malloc_type *mtype)
{
        hammer_alist_t live;
        hammer_alist_config_t bl;
        size_t metasize;

        live = kmalloc(sizeof(*live), mtype, M_WAITOK);
        live->config = bl = kmalloc(sizeof(*bl), mtype, M_WAITOK);
        hammer_alist_template(bl, blocks, base_radix, 0);

        metasize = sizeof(*live->meta) * bl->bl_rootblks;
        live->meta = kmalloc(metasize, mtype, M_WAITOK);
        bzero(live->meta, metasize);

#if defined(ALIST_DEBUG)
        kprintf(
                "ALIST representing %d blocks (%d MB of swap)"
                ", requiring %dK (%d bytes) of ram\n",
                bl->bl_blocks,
                bl->bl_blocks * 4 / 1024,
                (bl->bl_rootblks * sizeof(*live->meta) + 1023) / 1024,
                (bl->bl_rootblks * sizeof(*live->meta))
        );
        if (base_radix != 1) {
                kprintf("ALIST recurses when it reaches a base_radix of %d\n",
                        base_radix);
        }
        kprintf("ALIST raw radix tree contains %d records\n", bl->bl_rootblks);
#endif
        hammer_alist_init(live);
        return(live);
}

void
hammer_alist_destroy(hammer_alist_t live, struct malloc_type *mtype)
{
        kfree(live->config, mtype);
        kfree(live->meta, mtype);
        live->config = NULL;
        live->meta = NULL;
        kfree(live, mtype);
}

#endif

/*
 * hammer_alist_alloc()
 *
 *      Reserve space in the block bitmap.  Return the base of a contiguous
 *      region or HAMMER_ALIST_BLOCK_NONE if space could not be allocated.
 */

int32_t 
hammer_alist_alloc(hammer_alist_t live, int32_t count)
{
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;
        hammer_alist_config_t bl = live->config;

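        /*
         * This assertion requires count to be a power of 2; the identity
         * holds only then, e.g. (8 | 7) == 15 == (8 << 1) - 1, while for
         * count == 6, (6 | 5) == 7 but (6 << 1) - 1 == 11.  The same
         * check guards the other allocation entry points below.
         */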
        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count <= bl->bl_radix) {
                /*
                 * When skip is 1 we are at a leaf.  If we are the terminal
                 * allocator we use our native leaf functions and radix will
                 * be HAMMER_ALIST_BMAP_RADIX.  Otherwise we are a meta node
                 * which will chain to another allocator.
                 */
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_fwd(
                                    live->meta + 1, 0, count, 0);
                } else {
                        blk = hammer_alst_meta_alloc_fwd(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip, 0);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

int32_t 
hammer_alist_alloc_fwd(hammer_alist_t live, int32_t count, int32_t atblk)
{
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;
        hammer_alist_config_t bl = live->config;

        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count <= bl->bl_radix) {
                /*
                 * When skip is 1 we are at a leaf.  If we are the terminal
                 * allocator we use our native leaf functions and radix will
                 * be HAMMER_ALIST_BMAP_RADIX.  Otherwise we are a meta node
                 * which will chain to another allocator.
                 */
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_fwd(
                                    live->meta + 1, 0, count, atblk);
                } else {
                        blk = hammer_alst_meta_alloc_fwd(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip, atblk);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

int32_t 
hammer_alist_alloc_rev(hammer_alist_t live, int32_t count, int32_t atblk)
{
        hammer_alist_config_t bl = live->config;
        int32_t blk = HAMMER_ALIST_BLOCK_NONE;

        KKASSERT((count | (count - 1)) == (count << 1) - 1);

        if (bl && count < bl->bl_radix) {
                if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                        KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                        blk = hammer_alst_leaf_alloc_rev(
                                    live->meta + 1, 0, count, atblk);
                } else {
                        blk = hammer_alst_meta_alloc_rev(
                                    live, live->meta + 1,
                                    0, count, bl->bl_radix, bl->bl_skip, atblk);
                }
                if (blk != HAMMER_ALIST_BLOCK_NONE)
                        live->meta->bm_alist_freeblks -= count;
        }
        return(blk);
}

/*
 * hammer_alist_free()
 *
 *      Free up space in the block bitmap.  Panic if an inconsistency is
 *      found.
 *
 *      Unlike allocations, there are no alignment requirements for blkno or
 *      count when freeing blocks.
 */

void 
hammer_alist_free(hammer_alist_t live, int32_t blkno, int32_t count)
{
        hammer_alist_config_t bl = live->config;

        KKASSERT(blkno + count <= bl->bl_blocks);
        if (bl->bl_skip == 1 && bl->bl_terminal) {
#ifndef _KERNEL
                KKASSERT(bl->bl_radix == HAMMER_ALIST_BMAP_RADIX);
#endif
                hammer_alst_leaf_free(live->meta + 1, blkno, count);
        } else {
                hammer_alst_meta_free(live, live->meta + 1,
                                      blkno, count,
                                      bl->bl_radix, bl->bl_skip, 0);
        }
        live->meta->bm_alist_freeblks += count;
}

int
hammer_alist_isfull(hammer_alist_t live)
{
        return(live->meta->bm_alist_freeblks == 0);
}

int
hammer_alist_isempty(hammer_alist_t live)
{
        return(live->meta->bm_alist_freeblks == live->config->bl_radix);
}

#ifdef ALIST_DEBUG

/*
 * hammer_alist_print()    - dump radix tree
 */

void
hammer_alist_print(hammer_alist_t live, int tab)
{
        hammer_alist_config_t bl = live->config;

        kprintf("%*.*sALIST (%d free blocks) {\n",
                tab, tab, "", live->meta->bm_alist_freeblks);
        hammer_alst_radix_print(live, live->meta + 1, 0,
                                bl->bl_radix, bl->bl_skip, tab + 4);
        kprintf("%*.*s}\n", tab, tab, "");
}

#endif

/************************************************************************
 *                        ALLOCATION SUPPORT FUNCTIONS                  *
 ************************************************************************
 *
 *      These support functions do all the actual work.  They may seem 
 *      rather longish, but that's because I've commented them up.  The
 *      actual code is straightforward.
 *
 */

/*
 * hammer_alst_leaf_alloc_fwd()
 *
 *      Allocate at a leaf in the radix tree (a bitmap).
 *
 *      This is the core of the allocator and is optimized for the 1 block
 *      and the HAMMER_ALIST_BMAP_RADIX block allocation cases.  Other cases
 *      are somewhat slower.  The 1 block allocation case is log2 and extremely
 *      quick.
 */

static int32_t
hammer_alst_leaf_alloc_fwd(hammer_almeta_t scan, int32_t blk,
                           int count, int32_t atblk)
{
        u_int32_t orig = scan->bm_bitmap;
        int32_t saveblk = blk;

        /*
         * Optimize bitmap all-allocated case.  Also, count = 1
         * case assumes at least 1 bit is free in the bitmap, so
         * we have to take care of this case here.
         */
        if (orig == 0) {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

#if 0
        /*
         * Optimized code to allocate one bit out of the bitmap
         *
         * mask iterates e.g. 00001111 00000011 00000001
         *
         * mask starts at 00001111
         */
        if (count == 1) {
                u_int32_t mask;
                int j = HAMMER_ALIST_BMAP_RADIX/2;
                int r = 0;

                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX/2);

                while (j) {
                        if ((orig & mask) == 0) {
                            r += j;
                            orig >>= j;
                        }
                        j >>= 1;
                        mask >>= j;
                }
                scan->bm_bitmap &= ~(1 << r);
                return(blk + r);
        }
#endif

        /*
         * non-optimized code to allocate N bits out of the bitmap.
         * The more bits, the faster the code runs.  It will run
         * the slowest allocating 2 bits, but since there aren't any
         * memory ops in the core loop (or shouldn't be, anyway),
         * you probably won't notice the difference.
         *
         * Similar to the blist case, the alist code also requires
         * allocations to be power-of-2 sized and aligned to the
         * size of the allocation, which simplifies the algorithm.
         */
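        /* initial mask if count == 2:  0000....0011 (mirrors the rev case) */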
        {
                int j;
                int n = HAMMER_ALIST_BMAP_RADIX - count;
                u_int32_t mask;

                mask = (u_int32_t)-1 >> n;

                for (j = 0; j <= n; j += count) {
                        if ((orig & mask) == mask && blk >= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask = mask << count;
                        blk += count;
                }
        }

        /*
         * We couldn't allocate count in this subtree, update bighint if
         * atblk didn't interfere with the hinting mechanism.
         */
        if (saveblk >= atblk)
                scan->bm_bighint = count - 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * This version allocates blocks in the reverse direction.
 */
static int32_t
hammer_alst_leaf_alloc_rev(hammer_almeta_t scan, int32_t blk,
                           int count, int32_t atblk)
{
        u_int32_t orig = scan->bm_bitmap;
        int32_t saveblk;

        /*
         * Optimize bitmap all-allocated case.  Also, count = 1
         * case assumes at least 1 bit is free in the bitmap, so
         * we have to take care of this case here.
         */
        if (orig == 0) {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

#if 0
        /*
         * Optimized code to allocate one bit out of the bitmap
         */
        if (count == 1) {
                u_int32_t mask;
                int j = HAMMER_ALIST_BMAP_RADIX/2;
                int r = HAMMER_ALIST_BMAP_RADIX - 1;

                mask = ~((u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX/2));

                while (j) {
                        if ((orig & mask) == 0) {
                            r -= j;
                            orig <<= j;
                        }
                        j >>= 1;
                        mask <<= j;
                }
                scan->bm_bitmap &= ~(1 << r);
                return(blk + r);
        }
#endif

        /*
         * non-optimized code to allocate N bits out of the bitmap.
         * The more bits, the faster the code runs.  It will run
         * the slowest allocating 2 bits, but since there aren't any
         * memory ops in the core loop (or shouldn't be, anyway),
         * you probably won't notice the difference.
         *
         * Similar to the blist case, the alist code also requires
         * allocations to be power-of-2 sized and aligned to the
         * size of the allocation, which simplifies the algorithm.
         *
         * initial mask if count == 2:  1100....0000
         */
        {
                int j;
                int n = HAMMER_ALIST_BMAP_RADIX - count;
                u_int32_t mask;

                mask = ((u_int32_t)-1 >> n) << n;
                blk += n;
                saveblk = blk;

                for (j = n; j >= 0; j -= count) {
                        if ((orig & mask) == mask && blk <= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask = mask >> count;
                        blk -= count;
                }
        }

        /*
         * We couldn't allocate count in this subtree, update bighint if
         * atblk didn't interfere with it.
         */
        if (saveblk <= atblk)
                scan->bm_bighint = count - 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * hammer_alst_meta_alloc_fwd()
 *
 *      Allocate at a meta node in the radix tree.
 *
 *      Attempt to allocate at a meta node.  If we can't, we update
 *      bighint and return a failure.  Updating bighint optimizes future
 *      calls that hit this node.  We have to check for our collapse cases
 *      and we have a few optimizations strewn in as well.
 */
static int32_t
hammer_alst_meta_alloc_fwd(hammer_alist_t live, hammer_almeta_t scan,
                           int32_t blk, int32_t count,
                           int32_t radix, int skip, int32_t atblk
) {
        hammer_alist_config_t bl;
        u_int32_t mask;
        u_int32_t pmask;
        int32_t saveblk;
        int next_skip;
        int i;

        /*
         * ALL-ALLOCATED special case
         */
        if (scan->bm_bitmap == 0)  {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        /*
         * Radix now represents each bitmap entry for this meta node.  If
         * the number of blocks being allocated can be fully represented,
         * we allocate directly out of this meta node.
         *
         * Meta node bitmaps use 2 bits per block.
         *
         *      00      ALL-ALLOCATED
         *      01      PARTIALLY-FREE/PARTIALLY-ALLOCATED
         *      10      (RESERVED)
         *      11      ALL-FREE
         */
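        /*
         * e.g. if count == radix * 2 this computes n == 4 bits, nd2 == 2
         * map entries, and an initial mask of 0x0000000F, scanning two
         * entries at a time.
         */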
        if (count >= radix) {
                int n = count / radix * 2;      /* number of bits */
                int nd2 = n / 2;
                int j;

                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - n);
                saveblk = blk;
                for (j = 0; j < HAMMER_ALIST_META_RADIX; j += nd2) {
                        if ((scan->bm_bitmap & mask) == mask && blk >= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask <<= n;
                        blk += radix * nd2;
                }
                if (scan->bm_bighint >= count && saveblk >= atblk)
                        scan->bm_bighint = count >> 1;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * If the count is too big we couldn't allocate anything from a
         * recursion even if the sub-tree were entirely free.
         */
        saveblk = blk;
        if (count > radix)
                goto failed;

        /*
         * If not we have to recurse.
         */
        mask = 0x00000003;
        pmask = 0x00000001;

        if (skip == 1) {
                /*
                 * If skip is 1 we are a meta leaf node, which means there
                 * is another allocation layer we have to dive down into.
                 */
                for (i = 0; i < HAMMER_ALIST_META_RADIX; ++i) {
                        /*
                         * If the next layer is completely free then call
                         * its init function to initialize it, then free
                         * all of its blocks before trying to allocate from
                         * it.
                         *
                         * bl_radix_init returns an error code or 0 on
                         * success.
                         */
                        if ((scan->bm_bitmap & mask) == mask &&
                            blk + radix > atblk) {
                                if (bl->bl_radix_init(live->info, blk, radix) == 0) {
                                        int32_t empty;
                                        scan->bm_bitmap &= ~mask;
                                        scan->bm_bitmap |= pmask;
                                        bl->bl_radix_free(live->info, blk,
                                                          radix, 0, radix,
                                                          &empty);
                                }
                        }

                        /*
                         * If there may be some free blocks try to allocate
                         * out of the layer.  If the layer indicates that
                         * it is completely full then clear both bits to
                         * propagate the condition.
                         */
                        if ((scan->bm_bitmap & mask) == pmask &&
                            blk + radix > atblk) {
                                int32_t r;
                                int32_t full;

                                r = bl->bl_radix_alloc_fwd(live->info, blk,
                                                           radix, count, atblk,
                                                           &full);
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (full) {
                                                scan->bm_bitmap &= ~mask;
                                                bl->bl_radix_destroy(live->info, blk, radix);
                                        }
                                        return(r);
                                }
                        }
                        blk += radix;
                        mask <<= 2;
                        pmask <<= 2;
                }
        } else {
                /*
                 * Otherwise there are sub-records in the current a-list
                 * layer.  We can also peek into the sub-layers to get
                 * more accurate size hints.
                 */
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                for (i = 1; i <= skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                /* 
                                 * Terminator
                                 */
                                break;
                        }

                        /*
                         * Initialize bitmap if allocating from the all-free
                         * case.
                         */
                        if ((scan->bm_bitmap & mask) == mask) {
                                scan[i].bm_bitmap = (u_int32_t)-1;
                                scan[i].bm_bighint = radix;
                        }

                        if (count <= scan[i].bm_bighint &&
                            blk + radix > atblk) {
                                /*
                                 * count fits in object, recurse into the
                                 * next layer.  If the next_skip is 1 it
                                 * will be either a normal leaf or a meta
                                 * leaf.
                                 */
                                int32_t r;

                                if (next_skip == 1 && bl->bl_terminal) {
                                        r = hammer_alst_leaf_alloc_fwd(
                                                &scan[i], blk, count, atblk);
                                } else {
                                        r = hammer_alst_meta_alloc_fwd(
                                                live, &scan[i],
                                                blk, count,
                                                radix, next_skip, atblk);
                                }
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (scan[i].bm_bitmap == 0) {
                                                scan->bm_bitmap &= ~mask;
                                        } else {
                                                scan->bm_bitmap &= ~mask;
                                                scan->bm_bitmap |= pmask;
                                        }
                                        return(r);
                                }
                        }
                        blk += radix;
                        mask <<= 2;
                        pmask <<= 2;
                }
        }

failed:
        /*
         * We couldn't allocate count in this subtree, update bighint.
         */
        if (scan->bm_bighint >= count && saveblk >= atblk)
                scan->bm_bighint = count >> 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * This version allocates blocks in the reverse direction.
 */
static int32_t
hammer_alst_meta_alloc_rev(hammer_alist_t live, hammer_almeta_t scan,
                           int32_t blk, int32_t count,
                           int32_t radix, int skip, int32_t atblk
) {
        hammer_alist_config_t bl;
        int i;
        int j;
        u_int32_t mask;
        u_int32_t pmask;
        int32_t saveblk;
        int next_skip;

        /*
         * ALL-ALLOCATED special case
         */
        if (scan->bm_bitmap == 0)  {
                scan->bm_bighint = 0;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        /*
         * Radix now represents each bitmap entry for this meta node.  If
         * the number of blocks being allocated can be fully represented,
         * we allocate directly out of this meta node.
         *
         * Meta node bitmaps use 2 bits per block.
         *
         *      00      ALL-ALLOCATED
         *      01      PARTIALLY-FREE/PARTIALLY-ALLOCATED
         *      10      (RESERVED)
         *      11      ALL-FREE
         */
        if (count >= radix) {
                int n = count / radix * 2;      /* number of bits */
                int nd2 = n / 2;                /* number of radices */
                int j;

                /*
                 * Initial mask if e.g. n == 2:  1100....0000
                 */
                mask = (u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - n) <<
                        (HAMMER_ALIST_BMAP_RADIX - n);
                blk += (HAMMER_ALIST_META_RADIX - nd2) * radix;
                saveblk = blk;
                for (j = HAMMER_ALIST_META_RADIX - nd2; j >= 0; j -= nd2) {
                        if ((scan->bm_bitmap & mask) == mask && blk <= atblk) {
                                scan->bm_bitmap &= ~mask;
                                return(blk);
                        }
                        mask >>= n;
                        blk -= nd2 * radix;
                }
                if (scan->bm_bighint >= count && saveblk <= atblk)
                        scan->bm_bighint = count >> 1;
                return(HAMMER_ALIST_BLOCK_NONE);
        }

        /*
         * If the count is too big we couldn't allocate anything from a
         * recursion even if the sub-tree were entirely free.
         */
        if (count > radix) {
                saveblk = atblk;        /* make it work for the conditional */
                goto failed;            /* at the failed label */
        }

        if (skip == 1) {
                /*
                 * We need to recurse but we are at a meta node leaf, which
                 * means there is another layer under us.
                 */
                mask = 0xC0000000;
                pmask = 0x40000000;
                blk += radix * HAMMER_ALIST_META_RADIX - radix;
                saveblk = blk;

                for (i = 0; i < HAMMER_ALIST_META_RADIX; ++i) {
                        /*
                         * If the next layer is completely free then call
                         * its init function to initialize it and mark it
                         * partially free.
                         */
                        if ((scan->bm_bitmap & mask) == mask && blk <= atblk) {
                                if (bl->bl_radix_init(live->info, blk, radix) == 0) {
                                        int32_t empty;
                                        scan->bm_bitmap &= ~mask;
                                        scan->bm_bitmap |= pmask;
                                        bl->bl_radix_free(live->info, blk,
                                                          radix, 0, radix,
                                                          &empty);
                                }
                        }

                        /*
                         * If there may be some free blocks try to allocate
                         * out of the layer.  If the layer indicates that
                         * it is completely full then clear both bits to
                         * propagate the condition.
                         */
                        if ((scan->bm_bitmap & mask) == pmask && blk <= atblk) {
                                int32_t r;
                                int32_t full;

                                r = bl->bl_radix_alloc_rev(live->info, blk,
                                                           radix, count,
                                                           atblk, &full);
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (full) {
                                                scan->bm_bitmap &= ~mask;
                                                bl->bl_radix_destroy(live->info, blk, radix);
                                        }
                                        return(r);
                                }
                        }
                        mask >>= 2;
                        pmask >>= 2;
                        blk -= radix;
                }
        } else {
                /*
                 * Since we are going in the reverse direction we need an
                 * extra loop to determine if there is a terminator, then
                 * run backwards.
                 *
                 * This is a little weird but we do not want to overflow the
                 * mask/pmask in the loop.
                 */
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                j = 0;
                for (i = 1; i < skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                KKASSERT(j != 0);
                                break;
                        }
                        blk += radix;
                        j += 2;
                }
                blk -= radix;
                j -= 2;
                mask = 0x00000003 << j;
                pmask = 0x00000001 << j;
                i -= next_skip;
                saveblk = blk;

                while (i >= 1) {
                        /*
                         * Initialize the bitmap in the child if allocating
                         * from the all-free case.
                         */
                        if ((scan->bm_bitmap & mask) == mask) {
                                scan[i].bm_bitmap = (u_int32_t)-1;
                                scan[i].bm_bighint = radix;
                        }

                        /*
                         * Handle various count cases.  Bighint may be too
                         * large but is never too small.
                         */
                        if (count <= scan[i].bm_bighint && blk <= atblk) {
                                /*
                                 * count fits in object
                                 */
                                int32_t r;
                                if (next_skip == 1 && bl->bl_terminal) {
                                        r = hammer_alst_leaf_alloc_rev(
                                                &scan[i], blk, count, atblk);
                                } else {
                                        r = hammer_alst_meta_alloc_rev(
                                                live, &scan[i],
                                                blk, count,
                                                radix, next_skip, atblk);
                                }
                                if (r != HAMMER_ALIST_BLOCK_NONE) {
                                        if (scan[i].bm_bitmap == 0) {
                                                scan->bm_bitmap &= ~mask;
                                        } else {
                                                scan->bm_bitmap &= ~mask;
                                                scan->bm_bitmap |= pmask;
                                        }
                                        return(r);
                                }
                        }
                        blk -= radix;
                        mask >>= 2;
                        pmask >>= 2;
                        i -= next_skip;
                }
        }

failed:
        /*
         * We couldn't allocate count in this subtree, update bighint.
         * Since we are restricted to powers of 2, the next highest count
         * we might be able to allocate is (count >> 1).
         */
        if (scan->bm_bighint >= count && saveblk <= atblk)
                scan->bm_bighint = count >> 1;
        return(HAMMER_ALIST_BLOCK_NONE);
}

/*
 * hammer_alst_leaf_free()
 *
 *      Free allocated blocks from leaf bitmap.  The allocation code is
 *      restricted to powers of 2, the freeing code is not.
 */
static void
hammer_alst_leaf_free(hammer_almeta_t scan, int32_t blk, int count) {
        /*
         * free some data in this bitmap
         *
         * e.g.
         *      0000111111111110000
         *          \_________/\__/
         *              v        n
         */
        int n = blk & (HAMMER_ALIST_BMAP_RADIX - 1);
        u_int32_t mask;

        mask = ((u_int32_t)-1 << n) &
            ((u_int32_t)-1 >> (HAMMER_ALIST_BMAP_RADIX - count - n));

        if (scan->bm_bitmap & mask)
                panic("hammer_alst_radix_free: freeing free block");
        scan->bm_bitmap |= mask;

        /*
         * We could probably do a better job here.  We are required to make
         * bighint at least as large as the biggest contiguous block of 
         * data.  If we just shoehorn it, a little extra overhead will
         * be incurred on the next allocation (but only that one typically).
         */
        scan->bm_bighint = HAMMER_ALIST_BMAP_RADIX;
}

/*
 * hammer_alst_meta_free()
 *
 *      Free allocated blocks from radix tree meta info.
 *
 *      This support routine frees a range of blocks from the bitmap.
 *      The range must be entirely enclosed by this radix node.  If a
 *      meta node, we break the range down recursively to free blocks
 *      in subnodes (which means that this code can free an arbitrary
 *      range whereas the allocation code cannot allocate an arbitrary
 *      range).
 */

static void 
hammer_alst_meta_free(hammer_alist_t live, hammer_almeta_t scan, 
                      int32_t freeBlk, int32_t count,
                      int32_t radix, int skip, int32_t blk
) {
        hammer_alist_config_t bl;
        int next_skip;
        u_int32_t mask;
        u_int32_t pmask;
        int i;

        /*
         * Break the free down into its components.  Because it is so easy
         * to implement, frees are not limited to power-of-2 sizes.
         *
         * Each block in a meta-node bitmap takes two bits.
         */
        radix /= HAMMER_ALIST_META_RADIX;
        bl = live->config;

        i = (freeBlk - blk) / radix;
        blk += i * radix;
        mask = 0x00000003 << (i * 2);
        pmask = 0x00000001 << (i * 2);
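        /*
         * e.g. a free beginning in the third map entry gives i == 2,
         * mask == 0x00000030, and pmask == 0x00000010.
         */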

        if (skip == 1) {
                /*
                 * Our meta node is a leaf node, which means it must recurse
                 * into another allocator.
                 */
                while (i < HAMMER_ALIST_META_RADIX && blk < freeBlk + count) {
                        int32_t v;

                        v = blk + radix - freeBlk;
                        if (v > count)
                                v = count;

                        if (scan->bm_bighint == (int32_t)-1)
                                panic("hammer_alst_meta_free: freeing unexpected range");

                        if (freeBlk == blk && count >= radix) {
                                /*
                                 * When the sub-tree becomes entirely free
                                 * we have to destroy it if it was previously
                                 * partially allocated.  If it was previously
                                 * fully allocated it has already been
                                 * destroyed (or never inited in the first
                                 * place).
                                 */
                                int32_t empty;

                                if ((scan->bm_bitmap & mask) != 0) {
                                        bl->bl_radix_free(live->info, blk, radix,
                                                          freeBlk - blk, v, &empty);
                                        bl->bl_radix_destroy(live->info, blk, radix);
                                }
                                scan->bm_bitmap |= mask;
                                scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                /* XXX bighint not being set properly */
                        } else {
                                /*
                                 * Recursion case.  If freeing from an 
                                 * all-allocated state init the sub-tree
                                 * first.
                                 */
                                int32_t empty;

                                if ((scan->bm_bitmap & mask) == 0)
                                        bl->bl_radix_init(live->info, blk, radix);
                                bl->bl_radix_free(live->info, blk, radix,
                                                  freeBlk - blk, v, &empty);
                                if (empty) {
                                        scan->bm_bitmap |= mask;
                                        scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                        bl->bl_radix_destroy(live->info, blk, radix);
                                        /* XXX bighint not being set properly */
                                } else {
                                        scan->bm_bitmap |= pmask;
                                        if (scan->bm_bighint < radix / 2)
                                                scan->bm_bighint = radix / 2;
                                        /* XXX bighint not being set properly */
                                }
                        }
                        ++i;
                        mask <<= 2;
                        pmask <<= 2;
                        count -= v;
                        freeBlk += v;
                        blk += radix;
                }
        } else {
                next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
                i = 1 + i * next_skip;

                while (i <= skip && blk < freeBlk + count) {
                        int32_t v;

                        v = blk + radix - freeBlk;
                        if (v > count)
                                v = count;

                        if (scan->bm_bighint == (int32_t)-1)
                                panic("hammer_alst_meta_free: freeing unexpected range");

                        if (freeBlk == blk && count >= radix) {
                                /*
                                 * All-free case, no need to update sub-tree
                                 */
                                scan->bm_bitmap |= mask;
                                scan->bm_bighint = radix * HAMMER_ALIST_META_RADIX;
                                /* XXX bighint not being set properly */
                        } else {
                                /*
                                 * Recursion case
                                 */
                                if (next_skip == 1 && bl->bl_terminal) {
                                        hammer_alst_leaf_free(&scan[i], freeBlk, v);
                                } else {
                                        hammer_alst_meta_free(live, &scan[i],
                                                              freeBlk, v,
                                                              radix, next_skip,
                                                              blk);
                                }
                                if (scan[i].bm_bitmap == (u_int32_t)-1)
                                        scan->bm_bitmap |= mask;
                                else
                                        scan->bm_bitmap |= pmask;
                                if (scan->bm_bighint < scan[i].bm_bighint)
                                        scan->bm_bighint = scan[i].bm_bighint;
                        }
                        mask <<= 2;
                        pmask <<= 2;
                        count -= v;
                        freeBlk += v;
                        blk += radix;
                        i += next_skip;
                }
        }
}

/*
 * hammer_alst_radix_init()
 *
 *      Initialize our meta structures and bitmaps and calculate the exact
 *      number of meta-nodes required to manage 'count' blocks.  
 *
 *      The required space may be truncated due to early termination records.
 */
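/*
 * Worked example (the same hypothetical 64-block topology sketched at the
 * top of this file): the root meta node, two leaves, and one
 * early-termination record yield a return value of 4, and
 * hammer_alist_template() adds one more record for the freeblks header,
 * giving bl_rootblks == 5.
 */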
static int32_t  
hammer_alst_radix_init(hammer_almeta_t scan, int32_t radix,
                       int skip, int32_t count)
{
        int i;
        int next_skip;
        int32_t memindex = 1;
        u_int32_t mask;
        u_int32_t pmask;

        /*
         * Basic initialization of the almeta for meta or leaf nodes
         */
        if (scan) {
                scan->bm_bighint = 0;
                scan->bm_bitmap = 0;
        }

        /*
         * We are at a leaf, we only eat one meta element. 
         */
        if (skip == 1)
                return(memindex);

        /*
         * Meta node.  If allocating the entire object we can special
         * case it.  However, we need to figure out how much memory
         * is required to manage 'count' blocks, so we continue on anyway.
         */
        radix /= HAMMER_ALIST_META_RADIX;
        next_skip = (skip - 1) / HAMMER_ALIST_META_RADIX;
        mask = 0x00000003;
        pmask = 0x00000001;

        for (i = 1; i <= skip; i += next_skip) {
                /*
                 * We eat up to this record
                 */
                memindex = i;

                if (count >= radix) {
                        /*
                         * Allocate the entire object
                         */
                        memindex += hammer_alst_radix_init(
                            ((scan) ? &scan[i] : NULL),
                            radix,
                            next_skip,
                            radix
                        );
                        count -= radix;
                        /* already marked as wholly allocated */
                } else if (count > 0) {
                        /*
                         * Allocate a partial object
                         */
                        memindex += hammer_alst_radix_init(
                            ((scan) ? &scan[i] : NULL),
                            radix,
                            next_skip,
                            count
                        );
                        count = 0;

                        /*
                         * Mark as partially allocated
                         */
                        if (scan)
                                scan->bm_bitmap |= pmask;
                } else {
                        /*
                         * Add terminator and break out.  The terminator
                         * eats the meta node at scan[i].
                         */
                        ++memindex;
                        if (scan)
                                scan[i].bm_bighint = (int32_t)-1;
                        /* already marked as wholly allocated */
                        break;
                }
                mask <<= 2;
                pmask <<= 2;
        }
        return(memindex);
}
1321
1322 #ifdef ALIST_DEBUG
1323
1324 static void     
1325 hammer_alst_radix_print(hammer_alist_t live, hammer_almeta_t scan,
1326                         int32_t blk, int32_t radix, int skip, int tab)
1327 {
1328         int i;
1329         int next_skip;
1330         int lastState = 0;
1331         u_int32_t mask;
1332
1333         if (skip == 1 && live->config->bl_terminal) {
1334                 kprintf(
1335                     "%*.*s(%04x,%d): bitmap %08x big=%d\n", 
1336                     tab, tab, "",
1337                     blk, radix,
1338                     scan->bm_bitmap,
1339                     scan->bm_bighint
1340                 );
1341                 return;
1342         }

        if (scan->bm_bitmap == 0) {
                kprintf(
                    "%*.*s(%04x,%d) ALL ALLOCATED\n",
                    tab, tab, "",
                    blk,
                    radix
                );
                return;
        }
        if (scan->bm_bitmap == (u_int32_t)-1) {
                kprintf(
                    "%*.*s(%04x,%d) ALL FREE\n",
                    tab, tab, "",
                    blk,
                    radix
                );
                return;
        }

        kprintf(
            "%*.*s(%04x,%d): %s (%d) bitmap=%08x big=%d {\n",
            tab, tab, "",
            blk, radix,
            (skip == 1 ? "LAYER" : "subtree"),
            radix,
            scan->bm_bitmap,
            scan->bm_bighint
        );

        radix /= HAMMER_ALIST_META_RADIX;
        tab += 4;
        mask = 0x00000003;

        if (skip == 1) {
                for (i = 0; i < HAMMER_ALIST_META_RADIX; ++i) {
                        if ((scan->bm_bitmap & mask) == mask) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL FREE\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else if ((scan->bm_bitmap & mask) == 0) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL ALLOCATED\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else {
                                live->config->bl_radix_print(
                                                live->info, blk, radix, tab);
                        }
                        blk += radix;
                        mask <<= 2;
                }
        } else {
                next_skip = ((u_int)skip / HAMMER_ALIST_META_RADIX);

                for (i = 1; i <= skip; i += next_skip) {
                        if (scan[i].bm_bighint == (int32_t)-1) {
                                kprintf(
                                    "%*.*s(%04x,%d): Terminator\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                                break;
                        }
                        if ((scan->bm_bitmap & mask) == mask) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL FREE\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else if ((scan->bm_bitmap & mask) == 0) {
                                kprintf(
                                    "%*.*s(%04x,%d): ALL ALLOCATED\n",
                                    tab, tab, "",
                                    blk, radix
                                );
                        } else {
                                hammer_alst_radix_print(
                                    live,
                                    &scan[i],
                                    blk,
                                    radix,
                                    next_skip - 1,
                                    tab
                                );
                        }
                        blk += radix;
                        mask <<= 2;
                }
        }
        tab -= 4;

        kprintf(
            "%*.*s}\n",
            tab, tab, ""
        );
}

#endif

#ifdef ALIST_DEBUG

static struct hammer_alist_live **layers;       /* initialized by main */
static int32_t layer_radix = -1;

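/*
 * Userland test harness.  The callbacks below stand in for HAMMER's
 * real backing-store hooks: the top-level a-list covers 'size' blocks
 * and delegates each 'layer_radix'-block range to a dynamically
 * created second-layer a-list.  Block numbers arriving from the top
 * level are absolute, so each callback selects its layer with
 * blk / layer_radix, translates requests to layer-relative offsets
 * (atblk - blk), and translates results back (r += blk).
 */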
static
int
debug_radix_init(void *info, int32_t blk, int32_t radix)
{
        int layer_no = blk / layer_radix;

        printf("lower layer: init (%04x,%d)\n", blk, radix);
        KKASSERT(layers[layer_no] == NULL);
        layers[layer_no] = hammer_alist_create(radix, 1, NULL);
        return(0);
}

static
int
debug_radix_destroy(void *info, int32_t blk, int32_t radix)
{
        hammer_alist_t layer;
        int layer_no = blk / layer_radix;

        printf("lower layer: destroy (%04x,%d)\n", blk, radix);
        layer = layers[layer_no];
        KKASSERT(layer != NULL);
        hammer_alist_destroy(layer, NULL);
        layers[layer_no] = NULL;
        return(0);
}

static
int32_t
debug_radix_alloc_fwd(void *info, int32_t blk, int32_t radix,
                      int32_t count, int32_t atblk, int32_t *fullp)
{
        hammer_alist_t layer = layers[blk / layer_radix];
        int32_t r;

        r = hammer_alist_alloc_fwd(layer, count, atblk - blk);
        *fullp = (layer->meta->bm_alist_freeblks == 0);
        if (r != HAMMER_ALIST_BLOCK_NONE)
                r += blk;
        return(r);
}

static
int32_t
debug_radix_alloc_rev(void *info, int32_t blk, int32_t radix,
                      int32_t count, int32_t atblk, int32_t *fullp)
{
        hammer_alist_t layer = layers[blk / layer_radix];
        int32_t r;

        r = hammer_alist_alloc_rev(layer, count, atblk - blk);
        *fullp = (layer->meta->bm_alist_freeblks == 0);
        if (r != HAMMER_ALIST_BLOCK_NONE)
                r += blk;
        return(r);
}

static
void
debug_radix_free(void *info, int32_t blk, int32_t radix,
                 int32_t base_blk, int32_t count, int32_t *emptyp)
{
        int layer_no = blk / layer_radix;
        hammer_alist_t layer = layers[layer_no];

        if (layer == NULL) {
                layer = hammer_alist_create(layer_radix, 1, NULL);
                layers[layer_no] = layer;
        }
        hammer_alist_free(layer, base_blk, count);
        *emptyp = (layer->meta->bm_alist_freeblks == layer_radix);
}
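
/*
 * Note that the free callback creates layers on demand: main() seeds
 * the allocator with hammer_alist_free(live, 0, size) before any
 * allocation has had a chance to trigger debug_radix_init for those
 * ranges.
 */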

static
void
debug_radix_print(void *info, int32_t blk, int32_t radix, int tab)
{
        hammer_alist_t layer = layers[blk / layer_radix];

        hammer_alist_print(layer, tab);
}

int
main(int ac, char **av)
{
        int32_t size = -1;
        int i;
        hammer_alist_t live;

        for (i = 1; i < ac; ++i) {
                const char *ptr = av[i];
                if (*ptr != '-') {
                        if (size == -1)
                                size = strtol(ptr, NULL, 0);
                        else if (layer_radix == -1)
                                layer_radix = strtol(ptr, NULL, 0);
                        /* extra positional arguments are ignored */
                        continue;
                }
                fprintf(stderr, "Bad option: %s\n", ptr);
                exit(1);
        }
        if (size == -1)
                size = 1024;
        if (layer_radix == -1)
                layer_radix = 1;        /* no second storage layer */
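        /*
         * The power-of-2 checks below use a standard bit trick: for
         * x > 0, x is a power of 2 exactly when x ^ (x - 1) sets every
         * bit up to and including x's highest bit, i.e. equals
         * (x << 1) - 1.  E.g. 1024 ^ 1023 == 2047 == (1024 << 1) - 1,
         * while 12 ^ 11 == 7 but (12 << 1) - 1 == 23, so 12 fails.
         */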
        if ((size ^ (size - 1)) != (size << 1) - 1) {
                fprintf(stderr, "size must be a power of 2\n");
                exit(1);
        }
        if ((layer_radix ^ (layer_radix - 1)) != (layer_radix << 1) - 1) {
                fprintf(stderr, "the second layer radix must be a power of 2\n");
                exit(1);
        }

        live = hammer_alist_create(size, layer_radix, NULL);
        layers = calloc(size, sizeof(hammer_alist_t));

        printf("A-LIST TEST %d blocks, first-layer radix %d, "
               "second-layer radix %d\n",
               size, live->config->bl_radix / layer_radix, layer_radix);

        live->config->bl_radix_init = debug_radix_init;
        live->config->bl_radix_destroy = debug_radix_destroy;
        live->config->bl_radix_alloc_fwd = debug_radix_alloc_fwd;
        live->config->bl_radix_alloc_rev = debug_radix_alloc_rev;
        live->config->bl_radix_free = debug_radix_free;
        live->config->bl_radix_print = debug_radix_print;

        hammer_alist_free(live, 0, size);

        for (;;) {
                char buf[1024];
                int32_t da = 0;
                int32_t count = 0;
                int32_t atblk;
                int32_t blk;

                kprintf("%d/%d> ",
                        live->meta->bm_alist_freeblks, size);
                fflush(stdout);
                if (fgets(buf, sizeof(buf), stdin) == NULL)
                        break;
                switch(buf[0]) {
                case 'p':
                        hammer_alist_print(live, 0);
                        break;
                case 'a':
                        atblk = 0;      /* default start if not given */
                        if (sscanf(buf + 1, "%d %d", &count, &atblk) >= 1) {
                                blk = hammer_alist_alloc_fwd(live, count, atblk);
                                kprintf("    R=%04x\n", blk);
                        } else {
                                kprintf("?\n");
                        }
                        break;
                case 'r':
                        atblk = HAMMER_ALIST_BLOCK_MAX;
                        if (sscanf(buf + 1, "%d %d", &count, &atblk) >= 1) {
                                blk = hammer_alist_alloc_rev(live, count, atblk);
                                kprintf("    R=%04x\n", blk);
                        } else {
                                kprintf("?\n");
                        }
                        break;
                case 'f':
                        if (sscanf(buf + 1, "%x %d", &da, &count) == 2) {
                                hammer_alist_free(live, da, count);
                        } else {
                                kprintf("?\n");
                        }
                        break;
                case '?':
                case 'h':
                        puts(
                            "p            -print\n"
                            "a %d [%d]    -allocate forward [at/after block]\n"
                            "r %d [%d]    -allocate reverse [at/before block]\n"
                            "f %x %d      -free\n"
                            "h/?          -help"
                        );
                        break;
                default:
                        kprintf("?\n");
                        break;
                }
        }
        return(0);
}
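
/*
 * Illustrative session (editor's sketch; exact block numbers depend on
 * the allocator's first-fit behaviour):
 *
 *      1024/1024> a 256
 *          R=0000
 *      768/1024> r 16
 *          R=03f0
 *      752/1024> f 0 100
 *      852/1024> p
 *      ...
 */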

void
panic(const char *ctl, ...)
{
        __va_list va;

        __va_start(va, ctl);
        vfprintf(stderr, ctl, va);
        fprintf(stderr, "\n");
        __va_end(va);
        exit(1);
}

#endif