/*-
 * Copyright (c) 2004 Poul-Henning Kamp
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: releng/10.3/sys/kern/subr_unit.c 255057 2013-08-30 07:37:45Z kib $
 *
 *
 * Unit number allocation functions.
 *
 * These functions implement a mixed run-length/bitmap management of unit
 * number spaces in a very memory efficient manner.
 *
 * Allocation policy is always lowest free number first.
 *
 * A return value of -1 signals that no more unit numbers are available.
 *
 * There is no cost associated with the range of unitnumbers, so unless
 * the resource really is finite, specify INT_MAX to new_unrhdr() and
 * forget about checking the return value.
 *
 * If a mutex is not provided when the unit number space is created, a
 * default global mutex is used.  The advantage to passing a mutex in, is
 * that the alloc_unrl() function can be called with the mutex already
 * held (it will not be released by alloc_unrl()).
 *
 * The allocation function alloc_unr{l}() never sleeps (but it may block on
 * the mutex of course).
 *
 * Freeing a unit number may require allocating memory, and can therefore
 * sleep so the free_unr() function does not come in a pre-locked variant.
 *
 * A userland test program is included.
 *
 * Memory usage is a very complex function of the exact allocation
 * pattern, but always very compact:
 *    * For the very typical case where a single unbroken run of unit
 *      numbers are allocated 44 bytes are used on i386.
 *    * For a unit number space of 1000 units and the random pattern
 *      in the usermode test program included, the worst case usage
 *      was 252 bytes on i386 for 500 allocated and 500 free units.
 *    * For a unit number space of 10000 units and the random pattern
 *      in the usermode test program included, the worst case usage
 *      was 798 bytes on i386 for 5000 allocated and 5000 free units.
65143283Sphk * * The worst case is where every other unit number is allocated and 66240518Seadler * the rest are free. In that case 44 + N/4 bytes are used where 67143283Sphk * N is the number of the highest unit allocated. 68135956Sphk */ 69135956Sphk 70135956Sphk#include <sys/types.h> 71135956Sphk#include <sys/bitstring.h> 72255057Skib#include <sys/_unrhdr.h> 73135956Sphk 74135956Sphk#ifdef _KERNEL 75135956Sphk 76135956Sphk#include <sys/param.h> 77135956Sphk#include <sys/malloc.h> 78135956Sphk#include <sys/kernel.h> 79135956Sphk#include <sys/systm.h> 80143283Sphk#include <sys/limits.h> 81143283Sphk#include <sys/lock.h> 82143283Sphk#include <sys/mutex.h> 83135956Sphk 84135956Sphk/* 85135956Sphk * In theory it would be smarter to allocate the individual blocks 86135956Sphk * with the zone allocator, but at this time the expectation is that 87135956Sphk * there will typically not even be enough allocations to fill a single 88135956Sphk * page, so we stick with malloc for now. 89135956Sphk */ 90135956Sphkstatic MALLOC_DEFINE(M_UNIT, "Unitno", "Unit number allocation"); 91135956Sphk 92135956Sphk#define Malloc(foo) malloc(foo, M_UNIT, M_WAITOK | M_ZERO) 93135956Sphk#define Free(foo) free(foo, M_UNIT) 94135956Sphk 95143283Sphkstatic struct mtx unitmtx; 96143283Sphk 97143283SphkMTX_SYSINIT(unit, &unitmtx, "unit# allocation", MTX_DEF); 98143283Sphk 99135956Sphk#else /* ...USERLAND */ 100135956Sphk 101135956Sphk#include <stdio.h> 102135956Sphk#include <stdlib.h> 103143283Sphk#include <string.h> 104135956Sphk 105135956Sphk#define KASSERT(cond, arg) \ 106135956Sphk do { \ 107135956Sphk if (!(cond)) { \ 108135956Sphk printf arg; \ 109143283Sphk abort(); \ 110135956Sphk } \ 111135956Sphk } while (0) 112135956Sphk 113143283Sphkstatic int no_alloc; 114143283Sphk#define Malloc(foo) _Malloc(foo, __LINE__) 115143283Sphkstatic void * 116143283Sphk_Malloc(size_t foo, int line) 117143283Sphk{ 118143283Sphk 119143283Sphk KASSERT(no_alloc == 0, ("malloc in wrong place() line %d", 
line)); 120143283Sphk return (calloc(foo, 1)); 121143283Sphk} 122135956Sphk#define Free(foo) free(foo) 123135956Sphk 124143283Sphkstruct unrhdr; 125135956Sphk 126143283Sphk 127143283Sphkstruct mtx { 128143283Sphk int state; 129143283Sphk} unitmtx; 130143283Sphk 131143283Sphkstatic void 132143283Sphkmtx_lock(struct mtx *mp) 133143283Sphk{ 134143283Sphk KASSERT(mp->state == 0, ("mutex already locked")); 135143283Sphk mp->state = 1; 136143283Sphk} 137143283Sphk 138143283Sphkstatic void 139143283Sphkmtx_unlock(struct mtx *mp) 140143283Sphk{ 141143283Sphk KASSERT(mp->state == 1, ("mutex not locked")); 142143283Sphk mp->state = 0; 143143283Sphk} 144143283Sphk 145143283Sphk#define MA_OWNED 9 146143283Sphk 147143283Sphkstatic void 148143283Sphkmtx_assert(struct mtx *mp, int flag) 149143283Sphk{ 150143283Sphk if (flag == MA_OWNED) { 151143283Sphk KASSERT(mp->state == 1, ("mtx_assert(MA_OWNED) not true")); 152143283Sphk } 153143283Sphk} 154143283Sphk 155143283Sphk#define CTASSERT(foo) 156209256Sjh#define WITNESS_WARN(flags, lock, fmt, ...) (void)0 157143283Sphk 158143283Sphk#endif /* USERLAND */ 159143283Sphk 160135956Sphk/* 161135956Sphk * This is our basic building block. 162135956Sphk * 163135956Sphk * It can be used in three different ways depending on the value of the ptr 164135956Sphk * element: 165135956Sphk * If ptr is NULL, it represents a run of free items. 166135956Sphk * If ptr points to the unrhdr it represents a run of allocated items. 167135956Sphk * Otherwise it points to an bitstring of allocated items. 168135956Sphk * 169135956Sphk * For runs the len field is the length of the run. 170135956Sphk * For bitmaps the len field represents the number of allocated items. 171135956Sphk * 172135956Sphk * The bitmap is the same size as struct unr to optimize memory management. 
173135956Sphk */ 174135956Sphkstruct unr { 175135956Sphk TAILQ_ENTRY(unr) list; 176135956Sphk u_int len; 177135956Sphk void *ptr; 178135956Sphk}; 179135956Sphk 180143283Sphkstruct unrb { 181143283Sphk u_char busy; 182143283Sphk bitstr_t map[sizeof(struct unr) - 1]; 183143283Sphk}; 184143283Sphk 185143283SphkCTASSERT(sizeof(struct unr) == sizeof(struct unrb)); 186143283Sphk 187135956Sphk/* Number of bits in the bitmap */ 188143283Sphk#define NBITS ((int)sizeof(((struct unrb *)NULL)->map) * 8) 189135956Sphk 190135956Sphk#if defined(DIAGNOSTIC) || !defined(_KERNEL) 191135956Sphk/* 192135956Sphk * Consistency check function. 193135956Sphk * 194135956Sphk * Checks the internal consistency as well as we can. 195135956Sphk * 196135956Sphk * Called at all boundaries of this API. 197135956Sphk */ 198135956Sphkstatic void 199135956Sphkcheck_unrhdr(struct unrhdr *uh, int line) 200135956Sphk{ 201135956Sphk struct unr *up; 202143283Sphk struct unrb *ub; 203135956Sphk u_int x, y, z, w; 204135956Sphk 205143283Sphk y = uh->first; 206135956Sphk z = 0; 207135956Sphk TAILQ_FOREACH(up, &uh->head, list) { 208135956Sphk z++; 209135956Sphk if (up->ptr != uh && up->ptr != NULL) { 210143283Sphk ub = up->ptr; 211143283Sphk KASSERT (up->len <= NBITS, 212143283Sphk ("UNR inconsistency: len %u max %d (line %d)\n", 213143283Sphk up->len, NBITS, line)); 214135956Sphk z++; 215135956Sphk w = 0; 216143283Sphk for (x = 0; x < up->len; x++) 217143283Sphk if (bit_test(ub->map, x)) 218135956Sphk w++; 219143283Sphk KASSERT (w == ub->busy, 220143283Sphk ("UNR inconsistency: busy %u found %u (line %d)\n", 221143283Sphk ub->busy, w, line)); 222143283Sphk y += w; 223143283Sphk } else if (up->ptr != NULL) 224135956Sphk y += up->len; 225135956Sphk } 226135956Sphk KASSERT (y == uh->busy, 227135956Sphk ("UNR inconsistency: items %u found %u (line %d)\n", 228135956Sphk uh->busy, y, line)); 229135956Sphk KASSERT (z == uh->alloc, 230135956Sphk ("UNR inconsistency: chunks %u found %u (line %d)\n", 231135956Sphk 
uh->alloc, z, line)); 232135956Sphk} 233135956Sphk 234135956Sphk#else 235135956Sphk 236135956Sphkstatic __inline void 237135979Sjhbcheck_unrhdr(struct unrhdr *uh, int line) 238135956Sphk{ 239135956Sphk 240135956Sphk} 241135956Sphk 242135956Sphk#endif 243135956Sphk 244135956Sphk 245135956Sphk/* 246135956Sphk * Userland memory management. Just use calloc and keep track of how 247135956Sphk * many elements we have allocated for check_unrhdr(). 248135956Sphk */ 249135956Sphk 250135956Sphkstatic __inline void * 251143283Sphknew_unr(struct unrhdr *uh, void **p1, void **p2) 252135956Sphk{ 253143283Sphk void *p; 254143283Sphk 255135956Sphk uh->alloc++; 256143283Sphk KASSERT(*p1 != NULL || *p2 != NULL, ("Out of cached memory")); 257143283Sphk if (*p1 != NULL) { 258143283Sphk p = *p1; 259143283Sphk *p1 = NULL; 260143283Sphk return (p); 261143283Sphk } else { 262143283Sphk p = *p2; 263143283Sphk *p2 = NULL; 264143283Sphk return (p); 265143283Sphk } 266135956Sphk} 267135956Sphk 268135956Sphkstatic __inline void 269135956Sphkdelete_unr(struct unrhdr *uh, void *ptr) 270135956Sphk{ 271171202Skib struct unr *up; 272143283Sphk 273135956Sphk uh->alloc--; 274171202Skib up = ptr; 275171202Skib TAILQ_INSERT_TAIL(&uh->ppfree, up, list); 276135956Sphk} 277135956Sphk 278171202Skibvoid 279171202Skibclean_unrhdrl(struct unrhdr *uh) 280171202Skib{ 281171202Skib struct unr *up; 282171202Skib 283171202Skib mtx_assert(uh->mtx, MA_OWNED); 284171202Skib while ((up = TAILQ_FIRST(&uh->ppfree)) != NULL) { 285171202Skib TAILQ_REMOVE(&uh->ppfree, up, list); 286171202Skib mtx_unlock(uh->mtx); 287171202Skib Free(up); 288171202Skib mtx_lock(uh->mtx); 289171202Skib } 290171202Skib 291171202Skib} 292171202Skib 293171202Skibvoid 294171202Skibclean_unrhdr(struct unrhdr *uh) 295171202Skib{ 296171202Skib 297171202Skib mtx_lock(uh->mtx); 298171202Skib clean_unrhdrl(uh); 299171202Skib mtx_unlock(uh->mtx); 300171202Skib} 301171202Skib 302255057Skibvoid 303255057Skibinit_unrhdr(struct unrhdr *uh, int low, int 
high, struct mtx *mutex) 304135956Sphk{ 305135956Sphk 306209844Sjh KASSERT(low >= 0 && low <= high, 307209816Sjh ("UNR: use error: new_unrhdr(%d, %d)", low, high)); 308143283Sphk if (mutex != NULL) 309143283Sphk uh->mtx = mutex; 310143283Sphk else 311143283Sphk uh->mtx = &unitmtx; 312135956Sphk TAILQ_INIT(&uh->head); 313171202Skib TAILQ_INIT(&uh->ppfree); 314135956Sphk uh->low = low; 315135956Sphk uh->high = high; 316143283Sphk uh->first = 0; 317143283Sphk uh->last = 1 + (high - low); 318135956Sphk check_unrhdr(uh, __LINE__); 319255057Skib} 320255057Skib 321255057Skib/* 322255057Skib * Allocate a new unrheader set. 323255057Skib * 324255057Skib * Highest and lowest valid values given as parameters. 325255057Skib */ 326255057Skib 327255057Skibstruct unrhdr * 328255057Skibnew_unrhdr(int low, int high, struct mtx *mutex) 329255057Skib{ 330255057Skib struct unrhdr *uh; 331255057Skib 332255057Skib uh = Malloc(sizeof *uh); 333255057Skib init_unrhdr(uh, low, high, mutex); 334135956Sphk return (uh); 335135956Sphk} 336135956Sphk 337136945Sphkvoid 338136945Sphkdelete_unrhdr(struct unrhdr *uh) 339136945Sphk{ 340136945Sphk 341143283Sphk check_unrhdr(uh, __LINE__); 342136945Sphk KASSERT(uh->busy == 0, ("unrhdr has %u allocations", uh->busy)); 343136945Sphk KASSERT(uh->alloc == 0, ("UNR memory leak in delete_unrhdr")); 344171202Skib KASSERT(TAILQ_FIRST(&uh->ppfree) == NULL, 345171202Skib ("unrhdr has postponed item for free")); 346136945Sphk Free(uh); 347136945Sphk} 348136945Sphk 349143283Sphkstatic __inline int 350143283Sphkis_bitmap(struct unrhdr *uh, struct unr *up) 351143283Sphk{ 352143283Sphk return (up->ptr != uh && up->ptr != NULL); 353143283Sphk} 354143283Sphk 355135956Sphk/* 356143283Sphk * Look for sequence of items which can be combined into a bitmap, if 357143283Sphk * multiple are present, take the one which saves most memory. 358143283Sphk * 359143283Sphk * Return (1) if a sequence was found to indicate that another call 360143283Sphk * might be able to do more. 
Return (0) if we found no suitable sequence. 361143283Sphk * 362143283Sphk * NB: called from alloc_unr(), no new memory allocation allowed. 363135956Sphk */ 364143283Sphkstatic int 365143283Sphkoptimize_unr(struct unrhdr *uh) 366143283Sphk{ 367143283Sphk struct unr *up, *uf, *us; 368143283Sphk struct unrb *ub, *ubf; 369143283Sphk u_int a, l, ba; 370143283Sphk 371143283Sphk /* 372143283Sphk * Look for the run of items (if any) which when collapsed into 373143283Sphk * a bitmap would save most memory. 374143283Sphk */ 375143283Sphk us = NULL; 376143283Sphk ba = 0; 377143283Sphk TAILQ_FOREACH(uf, &uh->head, list) { 378143283Sphk if (uf->len >= NBITS) 379143283Sphk continue; 380143283Sphk a = 1; 381143283Sphk if (is_bitmap(uh, uf)) 382143283Sphk a++; 383143283Sphk l = uf->len; 384143283Sphk up = uf; 385143283Sphk while (1) { 386143283Sphk up = TAILQ_NEXT(up, list); 387143283Sphk if (up == NULL) 388143283Sphk break; 389143283Sphk if ((up->len + l) > NBITS) 390143283Sphk break; 391143283Sphk a++; 392143283Sphk if (is_bitmap(uh, up)) 393143283Sphk a++; 394143283Sphk l += up->len; 395143283Sphk } 396143283Sphk if (a > ba) { 397143283Sphk ba = a; 398143283Sphk us = uf; 399143283Sphk } 400143283Sphk } 401143283Sphk if (ba < 3) 402143283Sphk return (0); 403143283Sphk 404143283Sphk /* 405143283Sphk * If the first element is not a bitmap, make it one. 406143283Sphk * Trying to do so without allocating more memory complicates things 407143283Sphk * a bit 408143283Sphk */ 409143283Sphk if (!is_bitmap(uh, us)) { 410143283Sphk uf = TAILQ_NEXT(us, list); 411143283Sphk TAILQ_REMOVE(&uh->head, us, list); 412143283Sphk a = us->len; 413143283Sphk l = us->ptr == uh ? 
1 : 0; 414143283Sphk ub = (void *)us; 415143283Sphk ub->busy = 0; 416143283Sphk if (l) { 417143283Sphk bit_nset(ub->map, 0, a); 418143283Sphk ub->busy += a; 419143283Sphk } else { 420143283Sphk bit_nclear(ub->map, 0, a); 421143283Sphk } 422143283Sphk if (!is_bitmap(uh, uf)) { 423143283Sphk if (uf->ptr == NULL) { 424143283Sphk bit_nclear(ub->map, a, a + uf->len - 1); 425143283Sphk } else { 426143283Sphk bit_nset(ub->map, a, a + uf->len - 1); 427143283Sphk ub->busy += uf->len; 428143283Sphk } 429143283Sphk uf->ptr = ub; 430143283Sphk uf->len += a; 431143283Sphk us = uf; 432143283Sphk } else { 433143283Sphk ubf = uf->ptr; 434143283Sphk for (l = 0; l < uf->len; l++, a++) { 435143283Sphk if (bit_test(ubf->map, l)) { 436143283Sphk bit_set(ub->map, a); 437143283Sphk ub->busy++; 438143283Sphk } else { 439143283Sphk bit_clear(ub->map, a); 440143283Sphk } 441143283Sphk } 442143283Sphk uf->len = a; 443143283Sphk delete_unr(uh, uf->ptr); 444143283Sphk uf->ptr = ub; 445143283Sphk us = uf; 446143283Sphk } 447143283Sphk } 448143283Sphk ub = us->ptr; 449143283Sphk while (1) { 450143283Sphk uf = TAILQ_NEXT(us, list); 451143283Sphk if (uf == NULL) 452143283Sphk return (1); 453143283Sphk if (uf->len + us->len > NBITS) 454143283Sphk return (1); 455143283Sphk if (uf->ptr == NULL) { 456143283Sphk bit_nclear(ub->map, us->len, us->len + uf->len - 1); 457143283Sphk us->len += uf->len; 458143283Sphk TAILQ_REMOVE(&uh->head, uf, list); 459143283Sphk delete_unr(uh, uf); 460143283Sphk } else if (uf->ptr == uh) { 461143283Sphk bit_nset(ub->map, us->len, us->len + uf->len - 1); 462143283Sphk ub->busy += uf->len; 463143283Sphk us->len += uf->len; 464143283Sphk TAILQ_REMOVE(&uh->head, uf, list); 465143283Sphk delete_unr(uh, uf); 466143283Sphk } else { 467143283Sphk ubf = uf->ptr; 468143283Sphk for (l = 0; l < uf->len; l++, us->len++) { 469143283Sphk if (bit_test(ubf->map, l)) { 470143283Sphk bit_set(ub->map, us->len); 471143283Sphk ub->busy++; 472143283Sphk } else { 473143283Sphk bit_clear(ub->map, 
us->len); 474143283Sphk } 475143283Sphk } 476143283Sphk TAILQ_REMOVE(&uh->head, uf, list); 477143283Sphk delete_unr(uh, ubf); 478143283Sphk delete_unr(uh, uf); 479143283Sphk } 480143283Sphk } 481143283Sphk} 482143283Sphk 483143283Sphk/* 484143283Sphk * See if a given unr should be collapsed with a neighbor. 485143283Sphk * 486143283Sphk * NB: called from alloc_unr(), no new memory allocation allowed. 487143283Sphk */ 488135956Sphkstatic void 489135956Sphkcollapse_unr(struct unrhdr *uh, struct unr *up) 490135956Sphk{ 491135956Sphk struct unr *upp; 492143283Sphk struct unrb *ub; 493135956Sphk 494143283Sphk /* If bitmap is all set or clear, change it to runlength */ 495143283Sphk if (is_bitmap(uh, up)) { 496143283Sphk ub = up->ptr; 497143283Sphk if (ub->busy == up->len) { 498143283Sphk delete_unr(uh, up->ptr); 499143283Sphk up->ptr = uh; 500143283Sphk } else if (ub->busy == 0) { 501143283Sphk delete_unr(uh, up->ptr); 502143283Sphk up->ptr = NULL; 503143283Sphk } 504143283Sphk } 505143283Sphk 506143283Sphk /* If nothing left in runlength, delete it */ 507143283Sphk if (up->len == 0) { 508143283Sphk upp = TAILQ_PREV(up, unrhd, list); 509143283Sphk if (upp == NULL) 510143283Sphk upp = TAILQ_NEXT(up, list); 511143283Sphk TAILQ_REMOVE(&uh->head, up, list); 512143283Sphk delete_unr(uh, up); 513143283Sphk up = upp; 514143283Sphk } 515143283Sphk 516143283Sphk /* If we have "hot-spot" still, merge with neighbor if possible */ 517143283Sphk if (up != NULL) { 518143283Sphk upp = TAILQ_PREV(up, unrhd, list); 519143283Sphk if (upp != NULL && up->ptr == upp->ptr) { 520143283Sphk up->len += upp->len; 521143283Sphk TAILQ_REMOVE(&uh->head, upp, list); 522143283Sphk delete_unr(uh, upp); 523143283Sphk } 524143283Sphk upp = TAILQ_NEXT(up, list); 525143283Sphk if (upp != NULL && up->ptr == upp->ptr) { 526143283Sphk up->len += upp->len; 527143283Sphk TAILQ_REMOVE(&uh->head, upp, list); 528143283Sphk delete_unr(uh, upp); 529143283Sphk } 530143283Sphk } 531143283Sphk 532143283Sphk /* Merge 
into ->first if possible */ 533143283Sphk upp = TAILQ_FIRST(&uh->head); 534143283Sphk if (upp != NULL && upp->ptr == uh) { 535143283Sphk uh->first += upp->len; 536135956Sphk TAILQ_REMOVE(&uh->head, upp, list); 537135956Sphk delete_unr(uh, upp); 538143283Sphk if (up == upp) 539143283Sphk up = NULL; 540135956Sphk } 541143283Sphk 542143283Sphk /* Merge into ->last if possible */ 543143283Sphk upp = TAILQ_LAST(&uh->head, unrhd); 544143283Sphk if (upp != NULL && upp->ptr == NULL) { 545143283Sphk uh->last += upp->len; 546135956Sphk TAILQ_REMOVE(&uh->head, upp, list); 547135956Sphk delete_unr(uh, upp); 548143283Sphk if (up == upp) 549143283Sphk up = NULL; 550135956Sphk } 551143283Sphk 552143283Sphk /* Try to make bitmaps */ 553143283Sphk while (optimize_unr(uh)) 554143283Sphk continue; 555135956Sphk} 556135956Sphk 557135956Sphk/* 558135956Sphk * Allocate a free unr. 559135956Sphk */ 560143283Sphkint 561143283Sphkalloc_unrl(struct unrhdr *uh) 562135956Sphk{ 563143283Sphk struct unr *up; 564143283Sphk struct unrb *ub; 565135956Sphk u_int x; 566135956Sphk int y; 567135956Sphk 568143283Sphk mtx_assert(uh->mtx, MA_OWNED); 569135956Sphk check_unrhdr(uh, __LINE__); 570143283Sphk x = uh->low + uh->first; 571143283Sphk 572143283Sphk up = TAILQ_FIRST(&uh->head); 573143283Sphk 574135956Sphk /* 575143283Sphk * If we have an ideal split, just adjust the first+last 576135956Sphk */ 577143283Sphk if (up == NULL && uh->last > 0) { 578143283Sphk uh->first++; 579143283Sphk uh->last--; 580135956Sphk uh->busy++; 581135956Sphk return (x); 582135956Sphk } 583135956Sphk 584135956Sphk /* 585143283Sphk * We can always allocate from the first list element, so if we have 586143283Sphk * nothing on the list, we must have run out of unit numbers. 
587135956Sphk */ 588143550Sphk if (up == NULL) 589143283Sphk return (-1); 590143283Sphk 591143283Sphk KASSERT(up->ptr != uh, ("UNR first element is allocated")); 592143283Sphk 593143283Sphk if (up->ptr == NULL) { /* free run */ 594143283Sphk uh->first++; 595143283Sphk up->len--; 596143283Sphk } else { /* bitmap */ 597143283Sphk ub = up->ptr; 598143283Sphk KASSERT(ub->busy < up->len, ("UNR bitmap confusion")); 599143283Sphk bit_ffc(ub->map, up->len, &y); 600143283Sphk KASSERT(y != -1, ("UNR corruption: No clear bit in bitmap.")); 601143283Sphk bit_set(ub->map, y); 602143283Sphk ub->busy++; 603143283Sphk x += y; 604143283Sphk } 605135956Sphk uh->busy++; 606143283Sphk collapse_unr(uh, up); 607135956Sphk return (x); 608135956Sphk} 609135956Sphk 610143283Sphkint 611143283Sphkalloc_unr(struct unrhdr *uh) 612143283Sphk{ 613143283Sphk int i; 614143283Sphk 615143283Sphk mtx_lock(uh->mtx); 616143283Sphk i = alloc_unrl(uh); 617171202Skib clean_unrhdrl(uh); 618143283Sphk mtx_unlock(uh->mtx); 619143283Sphk return (i); 620143283Sphk} 621143283Sphk 622209710Sjhstatic int 623209710Sjhalloc_unr_specificl(struct unrhdr *uh, u_int item, void **p1, void **p2) 624209710Sjh{ 625209710Sjh struct unr *up, *upn; 626209710Sjh struct unrb *ub; 627209710Sjh u_int i, last, tl; 628209710Sjh 629209710Sjh mtx_assert(uh->mtx, MA_OWNED); 630209710Sjh 631209710Sjh if (item < uh->low + uh->first || item > uh->high) 632209710Sjh return (-1); 633209710Sjh 634209710Sjh up = TAILQ_FIRST(&uh->head); 635209710Sjh /* Ideal split. 
*/ 636209710Sjh if (up == NULL && item - uh->low == uh->first) { 637209710Sjh uh->first++; 638209710Sjh uh->last--; 639209710Sjh uh->busy++; 640209710Sjh check_unrhdr(uh, __LINE__); 641209710Sjh return (item); 642209710Sjh } 643209710Sjh 644209710Sjh i = item - uh->low - uh->first; 645209710Sjh 646209710Sjh if (up == NULL) { 647209710Sjh up = new_unr(uh, p1, p2); 648209710Sjh up->ptr = NULL; 649209710Sjh up->len = i; 650209710Sjh TAILQ_INSERT_TAIL(&uh->head, up, list); 651209710Sjh up = new_unr(uh, p1, p2); 652209710Sjh up->ptr = uh; 653209710Sjh up->len = 1; 654209710Sjh TAILQ_INSERT_TAIL(&uh->head, up, list); 655209710Sjh uh->last = uh->high - uh->low - i; 656209710Sjh uh->busy++; 657209710Sjh check_unrhdr(uh, __LINE__); 658209710Sjh return (item); 659209710Sjh } else { 660209710Sjh /* Find the item which contains the unit we want to allocate. */ 661209710Sjh TAILQ_FOREACH(up, &uh->head, list) { 662209710Sjh if (up->len > i) 663209710Sjh break; 664209710Sjh i -= up->len; 665209710Sjh } 666209710Sjh } 667209710Sjh 668209710Sjh if (up == NULL) { 669209710Sjh if (i > 0) { 670209710Sjh up = new_unr(uh, p1, p2); 671209710Sjh up->ptr = NULL; 672209710Sjh up->len = i; 673209710Sjh TAILQ_INSERT_TAIL(&uh->head, up, list); 674209710Sjh } 675209710Sjh up = new_unr(uh, p1, p2); 676209710Sjh up->ptr = uh; 677209710Sjh up->len = 1; 678209710Sjh TAILQ_INSERT_TAIL(&uh->head, up, list); 679209710Sjh goto done; 680209710Sjh } 681209710Sjh 682209710Sjh if (is_bitmap(uh, up)) { 683209710Sjh ub = up->ptr; 684209710Sjh if (bit_test(ub->map, i) == 0) { 685209710Sjh bit_set(ub->map, i); 686209710Sjh ub->busy++; 687209710Sjh goto done; 688209710Sjh } else 689209710Sjh return (-1); 690209710Sjh } else if (up->ptr == uh) 691209710Sjh return (-1); 692209710Sjh 693209710Sjh KASSERT(up->ptr == NULL, 694209710Sjh ("alloc_unr_specificl: up->ptr != NULL (up=%p)", up)); 695209710Sjh 696209710Sjh /* Split off the tail end, if any. 
*/ 697209710Sjh tl = up->len - (1 + i); 698209710Sjh if (tl > 0) { 699209710Sjh upn = new_unr(uh, p1, p2); 700209710Sjh upn->ptr = NULL; 701209710Sjh upn->len = tl; 702209710Sjh TAILQ_INSERT_AFTER(&uh->head, up, upn, list); 703209710Sjh } 704209710Sjh 705209710Sjh /* Split off head end, if any */ 706209710Sjh if (i > 0) { 707209710Sjh upn = new_unr(uh, p1, p2); 708209710Sjh upn->len = i; 709209710Sjh upn->ptr = NULL; 710209710Sjh TAILQ_INSERT_BEFORE(up, upn, list); 711209710Sjh } 712209710Sjh up->len = 1; 713209710Sjh up->ptr = uh; 714209710Sjh 715209710Sjhdone: 716209710Sjh last = uh->high - uh->low - (item - uh->low); 717209710Sjh if (uh->last > last) 718209710Sjh uh->last = last; 719209710Sjh uh->busy++; 720209710Sjh collapse_unr(uh, up); 721209710Sjh check_unrhdr(uh, __LINE__); 722209710Sjh return (item); 723209710Sjh} 724209710Sjh 725209710Sjhint 726209710Sjhalloc_unr_specific(struct unrhdr *uh, u_int item) 727209710Sjh{ 728209710Sjh void *p1, *p2; 729209710Sjh int i; 730209710Sjh 731209710Sjh WITNESS_WARN(WARN_GIANTOK | WARN_SLEEPOK, NULL, "alloc_unr_specific"); 732209710Sjh 733209710Sjh p1 = Malloc(sizeof(struct unr)); 734209710Sjh p2 = Malloc(sizeof(struct unr)); 735209710Sjh 736209710Sjh mtx_lock(uh->mtx); 737209710Sjh i = alloc_unr_specificl(uh, item, &p1, &p2); 738209710Sjh mtx_unlock(uh->mtx); 739209710Sjh 740209710Sjh if (p1 != NULL) 741209710Sjh Free(p1); 742209710Sjh if (p2 != NULL) 743209710Sjh Free(p2); 744209710Sjh 745209710Sjh return (i); 746209710Sjh} 747209710Sjh 748135956Sphk/* 749135956Sphk * Free a unr. 750135956Sphk * 751135956Sphk * If we can save unrs by using a bitmap, do so. 
752135956Sphk */ 753143283Sphkstatic void 754143283Sphkfree_unrl(struct unrhdr *uh, u_int item, void **p1, void **p2) 755135956Sphk{ 756143283Sphk struct unr *up, *upp, *upn; 757143283Sphk struct unrb *ub; 758143283Sphk u_int pl; 759135956Sphk 760135956Sphk KASSERT(item >= uh->low && item <= uh->high, 761135956Sphk ("UNR: free_unr(%u) out of range [%u...%u]", 762135956Sphk item, uh->low, uh->high)); 763135956Sphk check_unrhdr(uh, __LINE__); 764135956Sphk item -= uh->low; 765143283Sphk upp = TAILQ_FIRST(&uh->head); 766143283Sphk /* 767143283Sphk * Freeing in the ideal split case 768143283Sphk */ 769143283Sphk if (item + 1 == uh->first && upp == NULL) { 770143283Sphk uh->last++; 771143283Sphk uh->first--; 772143283Sphk uh->busy--; 773143283Sphk check_unrhdr(uh, __LINE__); 774143283Sphk return; 775143283Sphk } 776143283Sphk /* 777143283Sphk * Freeing in the ->first section. Create a run starting at the 778143283Sphk * freed item. The code below will subdivide it. 779143283Sphk */ 780143283Sphk if (item < uh->first) { 781143283Sphk up = new_unr(uh, p1, p2); 782143283Sphk up->ptr = uh; 783143283Sphk up->len = uh->first - item; 784143283Sphk TAILQ_INSERT_HEAD(&uh->head, up, list); 785143283Sphk uh->first -= up->len; 786143283Sphk } 787135956Sphk 788143283Sphk item -= uh->first; 789135956Sphk 790143283Sphk /* Find the item which contains the unit we want to free */ 791143283Sphk TAILQ_FOREACH(up, &uh->head, list) { 792143283Sphk if (up->len > item) 793143283Sphk break; 794143283Sphk item -= up->len; 795143283Sphk } 796135956Sphk 797143283Sphk /* Handle bitmap items */ 798143283Sphk if (is_bitmap(uh, up)) { 799143283Sphk ub = up->ptr; 800135956Sphk 801143283Sphk KASSERT(bit_test(ub->map, item) != 0, 802143283Sphk ("UNR: Freeing free item %d (bitmap)\n", item)); 803143283Sphk bit_clear(ub->map, item); 804143283Sphk uh->busy--; 805143283Sphk ub->busy--; 806143283Sphk collapse_unr(uh, up); 807143283Sphk return; 808143283Sphk } 809135956Sphk 810143283Sphk KASSERT(up->ptr == 
uh, ("UNR Freeing free item %d (run))\n", item)); 811135956Sphk 812143283Sphk /* Just this one left, reap it */ 813143283Sphk if (up->len == 1) { 814143283Sphk up->ptr = NULL; 815143283Sphk uh->busy--; 816143283Sphk collapse_unr(uh, up); 817143283Sphk return; 818143283Sphk } 819135956Sphk 820143283Sphk /* Check if we can shift the item into the previous 'free' run */ 821143283Sphk upp = TAILQ_PREV(up, unrhd, list); 822143283Sphk if (item == 0 && upp != NULL && upp->ptr == NULL) { 823143283Sphk upp->len++; 824143283Sphk up->len--; 825143283Sphk uh->busy--; 826143283Sphk collapse_unr(uh, up); 827143283Sphk return; 828143283Sphk } 829135956Sphk 830143283Sphk /* Check if we can shift the item to the next 'free' run */ 831143283Sphk upn = TAILQ_NEXT(up, list); 832143283Sphk if (item == up->len - 1 && upn != NULL && upn->ptr == NULL) { 833143283Sphk upn->len++; 834143283Sphk up->len--; 835135956Sphk uh->busy--; 836143283Sphk collapse_unr(uh, up); 837143283Sphk return; 838143283Sphk } 839135956Sphk 840143283Sphk /* Split off the tail end, if any. 
*/ 841143283Sphk pl = up->len - (1 + item); 842143283Sphk if (pl > 0) { 843143283Sphk upp = new_unr(uh, p1, p2); 844143283Sphk upp->ptr = uh; 845143283Sphk upp->len = pl; 846143283Sphk TAILQ_INSERT_AFTER(&uh->head, up, upp, list); 847143283Sphk } 848135956Sphk 849143283Sphk /* Split off head end, if any */ 850143283Sphk if (item > 0) { 851143283Sphk upp = new_unr(uh, p1, p2); 852143283Sphk upp->len = item; 853143283Sphk upp->ptr = uh; 854143283Sphk TAILQ_INSERT_BEFORE(up, upp, list); 855143283Sphk } 856143283Sphk up->len = 1; 857143283Sphk up->ptr = NULL; 858143283Sphk uh->busy--; 859143283Sphk collapse_unr(uh, up); 860143283Sphk} 861135956Sphk 862143283Sphkvoid 863143283Sphkfree_unr(struct unrhdr *uh, u_int item) 864143283Sphk{ 865143283Sphk void *p1, *p2; 866135956Sphk 867170949Skib WITNESS_WARN(WARN_GIANTOK | WARN_SLEEPOK, NULL, "free_unr"); 868143283Sphk p1 = Malloc(sizeof(struct unr)); 869143283Sphk p2 = Malloc(sizeof(struct unr)); 870143283Sphk mtx_lock(uh->mtx); 871143283Sphk free_unrl(uh, item, &p1, &p2); 872171202Skib clean_unrhdrl(uh); 873143283Sphk mtx_unlock(uh->mtx); 874143283Sphk if (p1 != NULL) 875143283Sphk Free(p1); 876143283Sphk if (p2 != NULL) 877143283Sphk Free(p2); 878135956Sphk} 879135956Sphk 880143550Sphk#ifndef _KERNEL /* USERLAND test driver */ 881143550Sphk 882135956Sphk/* 883135956Sphk * Simple stochastic test driver for the above functions 884135956Sphk */ 885135956Sphk 886135956Sphkstatic void 887135956Sphkprint_unr(struct unrhdr *uh, struct unr *up) 888135956Sphk{ 889135956Sphk u_int x; 890143283Sphk struct unrb *ub; 891135956Sphk 892135956Sphk printf(" %p len = %5u ", up, up->len); 893135956Sphk if (up->ptr == NULL) 894135956Sphk printf("free\n"); 895135956Sphk else if (up->ptr == uh) 896135956Sphk printf("alloc\n"); 897135956Sphk else { 898143283Sphk ub = up->ptr; 899143283Sphk printf("bitmap(%d) [", ub->busy); 900143283Sphk for (x = 0; x < up->len; x++) { 901143283Sphk if (bit_test(ub->map, x)) 902143283Sphk printf("#"); 
903135956Sphk else 904143283Sphk printf(" "); 905135956Sphk } 906135956Sphk printf("]\n"); 907135956Sphk } 908135956Sphk} 909135956Sphk 910135956Sphkstatic void 911135956Sphkprint_unrhdr(struct unrhdr *uh) 912135956Sphk{ 913135956Sphk struct unr *up; 914135956Sphk u_int x; 915135956Sphk 916143283Sphk printf( 917143283Sphk "%p low = %u high = %u first = %u last = %u busy %u chunks = %u\n", 918143283Sphk uh, uh->low, uh->high, uh->first, uh->last, uh->busy, uh->alloc); 919143283Sphk x = uh->low + uh->first; 920135956Sphk TAILQ_FOREACH(up, &uh->head, list) { 921135956Sphk printf(" from = %5u", x); 922135956Sphk print_unr(uh, up); 923135956Sphk if (up->ptr == NULL || up->ptr == uh) 924135956Sphk x += up->len; 925135956Sphk else 926135956Sphk x += NBITS; 927135956Sphk } 928135956Sphk} 929135956Sphk 930209710Sjhstatic void 931209710Sjhtest_alloc_unr(struct unrhdr *uh, u_int i, char a[]) 932209710Sjh{ 933209710Sjh int j; 934209710Sjh 935209710Sjh if (a[i]) { 936209710Sjh printf("F %u\n", i); 937209710Sjh free_unr(uh, i); 938209710Sjh a[i] = 0; 939209710Sjh } else { 940209710Sjh no_alloc = 1; 941209710Sjh j = alloc_unr(uh); 942209710Sjh if (j != -1) { 943209710Sjh a[j] = 1; 944209710Sjh printf("A %d\n", j); 945209710Sjh } 946209710Sjh no_alloc = 0; 947209710Sjh } 948209710Sjh} 949209710Sjh 950209710Sjhstatic void 951209710Sjhtest_alloc_unr_specific(struct unrhdr *uh, u_int i, char a[]) 952209710Sjh{ 953209710Sjh int j; 954209710Sjh 955209710Sjh j = alloc_unr_specific(uh, i); 956209710Sjh if (j == -1) { 957209710Sjh printf("F %u\n", i); 958209710Sjh a[i] = 0; 959209710Sjh free_unr(uh, i); 960209710Sjh } else { 961209710Sjh a[i] = 1; 962209710Sjh printf("A %d\n", j); 963209710Sjh } 964209710Sjh} 965209710Sjh 966135956Sphk/* Number of unrs to test */ 967135956Sphk#define NN 10000 968135956Sphk 969135956Sphkint 970135956Sphkmain(int argc __unused, const char **argv __unused) 971135956Sphk{ 972135956Sphk struct unrhdr *uh; 973143283Sphk u_int i, x, m, j; 974135956Sphk char 
a[NN]; 975135956Sphk 976143283Sphk setbuf(stdout, NULL); 977143238Sphk uh = new_unrhdr(0, NN - 1, NULL); 978143283Sphk print_unrhdr(uh); 979135956Sphk 980135956Sphk memset(a, 0, sizeof a); 981209710Sjh srandomdev(); 982135956Sphk 983209256Sjh fprintf(stderr, "sizeof(struct unr) %zu\n", sizeof(struct unr)); 984209256Sjh fprintf(stderr, "sizeof(struct unrb) %zu\n", sizeof(struct unrb)); 985209256Sjh fprintf(stderr, "sizeof(struct unrhdr) %zu\n", sizeof(struct unrhdr)); 986143283Sphk fprintf(stderr, "NBITS %d\n", NBITS); 987135956Sphk x = 1; 988143283Sphk for (m = 0; m < NN * 100; m++) { 989143283Sphk j = random(); 990143283Sphk i = (j >> 1) % NN; 991143283Sphk#if 0 992143283Sphk if (a[i] && (j & 1)) 993143283Sphk continue; 994143283Sphk#endif 995209710Sjh if ((random() & 1) != 0) 996209710Sjh test_alloc_unr(uh, i, a); 997209710Sjh else 998209710Sjh test_alloc_unr_specific(uh, i, a); 999209710Sjh 1000135956Sphk if (1) /* XXX: change this for detailed debug printout */ 1001135956Sphk print_unrhdr(uh); 1002135956Sphk check_unrhdr(uh, __LINE__); 1003135956Sphk } 1004143283Sphk for (i = 0; i < NN; i++) { 1005143283Sphk if (a[i]) { 1006143283Sphk printf("C %u\n", i); 1007136945Sphk free_unr(uh, i); 1008143283Sphk print_unrhdr(uh); 1009143283Sphk } 1010143283Sphk } 1011136945Sphk print_unrhdr(uh); 1012136945Sphk delete_unrhdr(uh); 1013135956Sphk return (0); 1014135956Sphk} 1015135956Sphk#endif 1016